##// END OF EJS Templates
urlutil: extract `url` related code from `util` into the new module...
marmoute -
r47669:ffd3e823 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,198 +1,201 b''
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''pull, update and merge in one command (DEPRECATED)'''
8 '''pull, update and merge in one command (DEPRECATED)'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.node import short
13 from mercurial.node import short
14 from mercurial import (
14 from mercurial import (
15 cmdutil,
15 cmdutil,
16 error,
16 error,
17 exchange,
17 exchange,
18 hg,
18 hg,
19 lock,
19 lock,
20 pycompat,
20 pycompat,
21 registrar,
21 registrar,
22 util,
23 )
22 )
24 from mercurial.utils import dateutil
23 from mercurial.utils import (
24 dateutil,
25 urlutil,
26 )
25
27
26 release = lock.release
28 release = lock.release
27 cmdtable = {}
29 cmdtable = {}
28 command = registrar.command(cmdtable)
30 command = registrar.command(cmdtable)
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # be specifying the version(s) of Mercurial they are tested with, or
33 # be specifying the version(s) of Mercurial they are tested with, or
32 # leave the attribute unspecified.
34 # leave the attribute unspecified.
33 testedwith = b'ships-with-hg-core'
35 testedwith = b'ships-with-hg-core'
34
36
35
37
36 @command(
38 @command(
37 b'fetch',
39 b'fetch',
38 [
40 [
39 (
41 (
40 b'r',
42 b'r',
41 b'rev',
43 b'rev',
42 [],
44 [],
43 _(b'a specific revision you would like to pull'),
45 _(b'a specific revision you would like to pull'),
44 _(b'REV'),
46 _(b'REV'),
45 ),
47 ),
46 (b'', b'edit', None, _(b'invoke editor on commit messages')),
48 (b'', b'edit', None, _(b'invoke editor on commit messages')),
47 (b'', b'force-editor', None, _(b'edit commit message (DEPRECATED)')),
49 (b'', b'force-editor', None, _(b'edit commit message (DEPRECATED)')),
48 (b'', b'switch-parent', None, _(b'switch parents when merging')),
50 (b'', b'switch-parent', None, _(b'switch parents when merging')),
49 ]
51 ]
50 + cmdutil.commitopts
52 + cmdutil.commitopts
51 + cmdutil.commitopts2
53 + cmdutil.commitopts2
52 + cmdutil.remoteopts,
54 + cmdutil.remoteopts,
53 _(b'hg fetch [SOURCE]'),
55 _(b'hg fetch [SOURCE]'),
54 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
56 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
55 )
57 )
56 def fetch(ui, repo, source=b'default', **opts):
58 def fetch(ui, repo, source=b'default', **opts):
57 """pull changes from a remote repository, merge new changes if needed.
59 """pull changes from a remote repository, merge new changes if needed.
58
60
59 This finds all changes from the repository at the specified path
61 This finds all changes from the repository at the specified path
60 or URL and adds them to the local repository.
62 or URL and adds them to the local repository.
61
63
62 If the pulled changes add a new branch head, the head is
64 If the pulled changes add a new branch head, the head is
63 automatically merged, and the result of the merge is committed.
65 automatically merged, and the result of the merge is committed.
64 Otherwise, the working directory is updated to include the new
66 Otherwise, the working directory is updated to include the new
65 changes.
67 changes.
66
68
67 When a merge is needed, the working directory is first updated to
69 When a merge is needed, the working directory is first updated to
68 the newly pulled changes. Local changes are then merged into the
70 the newly pulled changes. Local changes are then merged into the
69 pulled changes. To switch the merge order, use --switch-parent.
71 pulled changes. To switch the merge order, use --switch-parent.
70
72
71 See :hg:`help dates` for a list of formats valid for -d/--date.
73 See :hg:`help dates` for a list of formats valid for -d/--date.
72
74
73 Returns 0 on success.
75 Returns 0 on success.
74 """
76 """
75
77
76 opts = pycompat.byteskwargs(opts)
78 opts = pycompat.byteskwargs(opts)
77 date = opts.get(b'date')
79 date = opts.get(b'date')
78 if date:
80 if date:
79 opts[b'date'] = dateutil.parsedate(date)
81 opts[b'date'] = dateutil.parsedate(date)
80
82
81 parent = repo.dirstate.p1()
83 parent = repo.dirstate.p1()
82 branch = repo.dirstate.branch()
84 branch = repo.dirstate.branch()
83 try:
85 try:
84 branchnode = repo.branchtip(branch)
86 branchnode = repo.branchtip(branch)
85 except error.RepoLookupError:
87 except error.RepoLookupError:
86 branchnode = None
88 branchnode = None
87 if parent != branchnode:
89 if parent != branchnode:
88 raise error.Abort(
90 raise error.Abort(
89 _(b'working directory not at branch tip'),
91 _(b'working directory not at branch tip'),
90 hint=_(b"use 'hg update' to check out branch tip"),
92 hint=_(b"use 'hg update' to check out branch tip"),
91 )
93 )
92
94
93 wlock = lock = None
95 wlock = lock = None
94 try:
96 try:
95 wlock = repo.wlock()
97 wlock = repo.wlock()
96 lock = repo.lock()
98 lock = repo.lock()
97
99
98 cmdutil.bailifchanged(repo)
100 cmdutil.bailifchanged(repo)
99
101
100 bheads = repo.branchheads(branch)
102 bheads = repo.branchheads(branch)
101 bheads = [head for head in bheads if len(repo[head].children()) == 0]
103 bheads = [head for head in bheads if len(repo[head].children()) == 0]
102 if len(bheads) > 1:
104 if len(bheads) > 1:
103 raise error.Abort(
105 raise error.Abort(
104 _(
106 _(
105 b'multiple heads in this branch '
107 b'multiple heads in this branch '
106 b'(use "hg heads ." and "hg merge" to merge)'
108 b'(use "hg heads ." and "hg merge" to merge)'
107 )
109 )
108 )
110 )
109
111
110 other = hg.peer(repo, opts, ui.expandpath(source))
112 other = hg.peer(repo, opts, ui.expandpath(source))
111 ui.status(
113 ui.status(
112 _(b'pulling from %s\n') % util.hidepassword(ui.expandpath(source))
114 _(b'pulling from %s\n')
115 % urlutil.hidepassword(ui.expandpath(source))
113 )
116 )
114 revs = None
117 revs = None
115 if opts[b'rev']:
118 if opts[b'rev']:
116 try:
119 try:
117 revs = [other.lookup(rev) for rev in opts[b'rev']]
120 revs = [other.lookup(rev) for rev in opts[b'rev']]
118 except error.CapabilityError:
121 except error.CapabilityError:
119 err = _(
122 err = _(
120 b"other repository doesn't support revision lookup, "
123 b"other repository doesn't support revision lookup, "
121 b"so a rev cannot be specified."
124 b"so a rev cannot be specified."
122 )
125 )
123 raise error.Abort(err)
126 raise error.Abort(err)
124
127
125 # Are there any changes at all?
128 # Are there any changes at all?
126 modheads = exchange.pull(repo, other, heads=revs).cgresult
129 modheads = exchange.pull(repo, other, heads=revs).cgresult
127 if modheads == 0:
130 if modheads == 0:
128 return 0
131 return 0
129
132
130 # Is this a simple fast-forward along the current branch?
133 # Is this a simple fast-forward along the current branch?
131 newheads = repo.branchheads(branch)
134 newheads = repo.branchheads(branch)
132 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
135 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
133 if len(newheads) == 1 and len(newchildren):
136 if len(newheads) == 1 and len(newchildren):
134 if newchildren[0] != parent:
137 if newchildren[0] != parent:
135 return hg.update(repo, newchildren[0])
138 return hg.update(repo, newchildren[0])
136 else:
139 else:
137 return 0
140 return 0
138
141
139 # Are there more than one additional branch heads?
142 # Are there more than one additional branch heads?
140 newchildren = [n for n in newchildren if n != parent]
143 newchildren = [n for n in newchildren if n != parent]
141 newparent = parent
144 newparent = parent
142 if newchildren:
145 if newchildren:
143 newparent = newchildren[0]
146 newparent = newchildren[0]
144 hg.clean(repo, newparent)
147 hg.clean(repo, newparent)
145 newheads = [n for n in newheads if n != newparent]
148 newheads = [n for n in newheads if n != newparent]
146 if len(newheads) > 1:
149 if len(newheads) > 1:
147 ui.status(
150 ui.status(
148 _(
151 _(
149 b'not merging with %d other new branch heads '
152 b'not merging with %d other new branch heads '
150 b'(use "hg heads ." and "hg merge" to merge them)\n'
153 b'(use "hg heads ." and "hg merge" to merge them)\n'
151 )
154 )
152 % (len(newheads) - 1)
155 % (len(newheads) - 1)
153 )
156 )
154 return 1
157 return 1
155
158
156 if not newheads:
159 if not newheads:
157 return 0
160 return 0
158
161
159 # Otherwise, let's merge.
162 # Otherwise, let's merge.
160 err = False
163 err = False
161 if newheads:
164 if newheads:
162 # By default, we consider the repository we're pulling
165 # By default, we consider the repository we're pulling
163 # *from* as authoritative, so we merge our changes into
166 # *from* as authoritative, so we merge our changes into
164 # theirs.
167 # theirs.
165 if opts[b'switch_parent']:
168 if opts[b'switch_parent']:
166 firstparent, secondparent = newparent, newheads[0]
169 firstparent, secondparent = newparent, newheads[0]
167 else:
170 else:
168 firstparent, secondparent = newheads[0], newparent
171 firstparent, secondparent = newheads[0], newparent
169 ui.status(
172 ui.status(
170 _(b'updating to %d:%s\n')
173 _(b'updating to %d:%s\n')
171 % (repo.changelog.rev(firstparent), short(firstparent))
174 % (repo.changelog.rev(firstparent), short(firstparent))
172 )
175 )
173 hg.clean(repo, firstparent)
176 hg.clean(repo, firstparent)
174 p2ctx = repo[secondparent]
177 p2ctx = repo[secondparent]
175 ui.status(
178 ui.status(
176 _(b'merging with %d:%s\n') % (p2ctx.rev(), short(secondparent))
179 _(b'merging with %d:%s\n') % (p2ctx.rev(), short(secondparent))
177 )
180 )
178 err = hg.merge(p2ctx, remind=False)
181 err = hg.merge(p2ctx, remind=False)
179
182
180 if not err:
183 if not err:
181 # we don't translate commit messages
184 # we don't translate commit messages
182 message = cmdutil.logmessage(ui, opts) or (
185 message = cmdutil.logmessage(ui, opts) or (
183 b'Automated merge with %s' % util.removeauth(other.url())
186 b'Automated merge with %s' % urlutil.removeauth(other.url())
184 )
187 )
185 editopt = opts.get(b'edit') or opts.get(b'force_editor')
188 editopt = opts.get(b'edit') or opts.get(b'force_editor')
186 editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch')
189 editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch')
187 n = repo.commit(
190 n = repo.commit(
188 message, opts[b'user'], opts[b'date'], editor=editor
191 message, opts[b'user'], opts[b'date'], editor=editor
189 )
192 )
190 ui.status(
193 ui.status(
191 _(b'new changeset %d:%s merges remote changes with local\n')
194 _(b'new changeset %d:%s merges remote changes with local\n')
192 % (repo.changelog.rev(n), short(n))
195 % (repo.changelog.rev(n), short(n))
193 )
196 )
194
197
195 return err
198 return err
196
199
197 finally:
200 finally:
198 release(lock, wlock)
201 release(lock, wlock)
@@ -1,2665 +1,2666 b''
1 # histedit.py - interactive history editing for mercurial
1 # histedit.py - interactive history editing for mercurial
2 #
2 #
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """interactive history editing
7 """interactive history editing
8
8
9 With this extension installed, Mercurial gains one new command: histedit. Usage
9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 is as follows, assuming the following history::
10 is as follows, assuming the following history::
11
11
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 | Add delta
13 | Add delta
14 |
14 |
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 | Add gamma
16 | Add gamma
17 |
17 |
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 | Add beta
19 | Add beta
20 |
20 |
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 Add alpha
22 Add alpha
23
23
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 file open in your editor::
25 file open in your editor::
26
26
27 pick c561b4e977df Add beta
27 pick c561b4e977df Add beta
28 pick 030b686bedc4 Add gamma
28 pick 030b686bedc4 Add gamma
29 pick 7c2fd3b9020c Add delta
29 pick 7c2fd3b9020c Add delta
30
30
31 # Edit history between c561b4e977df and 7c2fd3b9020c
31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 #
32 #
33 # Commits are listed from least to most recent
33 # Commits are listed from least to most recent
34 #
34 #
35 # Commands:
35 # Commands:
36 # p, pick = use commit
36 # p, pick = use commit
37 # e, edit = use commit, but allow edits before making new commit
37 # e, edit = use commit, but allow edits before making new commit
38 # f, fold = use commit, but combine it with the one above
38 # f, fold = use commit, but combine it with the one above
39 # r, roll = like fold, but discard this commit's description and date
39 # r, roll = like fold, but discard this commit's description and date
40 # d, drop = remove commit from history
40 # d, drop = remove commit from history
41 # m, mess = edit commit message without changing commit content
41 # m, mess = edit commit message without changing commit content
42 # b, base = checkout changeset and apply further changesets from there
42 # b, base = checkout changeset and apply further changesets from there
43 #
43 #
44
44
45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
46 for each revision in your history. For example, if you had meant to add gamma
46 for each revision in your history. For example, if you had meant to add gamma
47 before beta, and then wanted to add delta in the same revision as beta, you
47 before beta, and then wanted to add delta in the same revision as beta, you
48 would reorganize the file to look like this::
48 would reorganize the file to look like this::
49
49
50 pick 030b686bedc4 Add gamma
50 pick 030b686bedc4 Add gamma
51 pick c561b4e977df Add beta
51 pick c561b4e977df Add beta
52 fold 7c2fd3b9020c Add delta
52 fold 7c2fd3b9020c Add delta
53
53
54 # Edit history between c561b4e977df and 7c2fd3b9020c
54 # Edit history between c561b4e977df and 7c2fd3b9020c
55 #
55 #
56 # Commits are listed from least to most recent
56 # Commits are listed from least to most recent
57 #
57 #
58 # Commands:
58 # Commands:
59 # p, pick = use commit
59 # p, pick = use commit
60 # e, edit = use commit, but allow edits before making new commit
60 # e, edit = use commit, but allow edits before making new commit
61 # f, fold = use commit, but combine it with the one above
61 # f, fold = use commit, but combine it with the one above
62 # r, roll = like fold, but discard this commit's description and date
62 # r, roll = like fold, but discard this commit's description and date
63 # d, drop = remove commit from history
63 # d, drop = remove commit from history
64 # m, mess = edit commit message without changing commit content
64 # m, mess = edit commit message without changing commit content
65 # b, base = checkout changeset and apply further changesets from there
65 # b, base = checkout changeset and apply further changesets from there
66 #
66 #
67
67
68 At which point you close the editor and ``histedit`` starts working. When you
68 At which point you close the editor and ``histedit`` starts working. When you
69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
70 those revisions together, offering you a chance to clean up the commit message::
70 those revisions together, offering you a chance to clean up the commit message::
71
71
72 Add beta
72 Add beta
73 ***
73 ***
74 Add delta
74 Add delta
75
75
76 Edit the commit message to your liking, then close the editor. The date used
76 Edit the commit message to your liking, then close the editor. The date used
77 for the commit will be the later of the two commits' dates. For this example,
77 for the commit will be the later of the two commits' dates. For this example,
78 let's assume that the commit message was changed to ``Add beta and delta.``
78 let's assume that the commit message was changed to ``Add beta and delta.``
79 After histedit has run and had a chance to remove any old or temporary
79 After histedit has run and had a chance to remove any old or temporary
80 revisions it needed, the history looks like this::
80 revisions it needed, the history looks like this::
81
81
82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
83 | Add beta and delta.
83 | Add beta and delta.
84 |
84 |
85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
86 | Add gamma
86 | Add gamma
87 |
87 |
88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
89 Add alpha
89 Add alpha
90
90
91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
92 ones) until after it has completed all the editing operations, so it will
92 ones) until after it has completed all the editing operations, so it will
93 probably perform several strip operations when it's done. For the above example,
93 probably perform several strip operations when it's done. For the above example,
94 it had to run strip twice. Strip can be slow depending on a variety of factors,
94 it had to run strip twice. Strip can be slow depending on a variety of factors,
95 so you might need to be a little patient. You can choose to keep the original
95 so you might need to be a little patient. You can choose to keep the original
96 revisions by passing the ``--keep`` flag.
96 revisions by passing the ``--keep`` flag.
97
97
98 The ``edit`` operation will drop you back to a command prompt,
98 The ``edit`` operation will drop you back to a command prompt,
99 allowing you to edit files freely, or even use ``hg record`` to commit
99 allowing you to edit files freely, or even use ``hg record`` to commit
100 some changes as a separate commit. When you're done, any remaining
100 some changes as a separate commit. When you're done, any remaining
101 uncommitted changes will be committed as well. When done, run ``hg
101 uncommitted changes will be committed as well. When done, run ``hg
102 histedit --continue`` to finish this step. If there are uncommitted
102 histedit --continue`` to finish this step. If there are uncommitted
103 changes, you'll be prompted for a new commit message, but the default
103 changes, you'll be prompted for a new commit message, but the default
104 commit message will be the original message for the ``edit`` ed
104 commit message will be the original message for the ``edit`` ed
105 revision, and the date of the original commit will be preserved.
105 revision, and the date of the original commit will be preserved.
106
106
107 The ``message`` operation will give you a chance to revise a commit
107 The ``message`` operation will give you a chance to revise a commit
108 message without changing the contents. It's a shortcut for doing
108 message without changing the contents. It's a shortcut for doing
109 ``edit`` immediately followed by `hg histedit --continue``.
109 ``edit`` immediately followed by `hg histedit --continue``.
110
110
111 If ``histedit`` encounters a conflict when moving a revision (while
111 If ``histedit`` encounters a conflict when moving a revision (while
112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
113 ``edit`` with the difference that it won't prompt you for a commit
113 ``edit`` with the difference that it won't prompt you for a commit
114 message when done. If you decide at this point that you don't like how
114 message when done. If you decide at this point that you don't like how
115 much work it will be to rearrange history, or that you made a mistake,
115 much work it will be to rearrange history, or that you made a mistake,
116 you can use ``hg histedit --abort`` to abandon the new changes you
116 you can use ``hg histedit --abort`` to abandon the new changes you
117 have made and return to the state before you attempted to edit your
117 have made and return to the state before you attempted to edit your
118 history.
118 history.
119
119
120 If we clone the histedit-ed example repository above and add four more
120 If we clone the histedit-ed example repository above and add four more
121 changes, such that we have the following history::
121 changes, such that we have the following history::
122
122
123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
124 | Add theta
124 | Add theta
125 |
125 |
126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
127 | Add eta
127 | Add eta
128 |
128 |
129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
130 | Add zeta
130 | Add zeta
131 |
131 |
132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
133 | Add epsilon
133 | Add epsilon
134 |
134 |
135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
136 | Add beta and delta.
136 | Add beta and delta.
137 |
137 |
138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
139 | Add gamma
139 | Add gamma
140 |
140 |
141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
142 Add alpha
142 Add alpha
143
143
144 If you run ``hg histedit --outgoing`` on the clone then it is the same
144 If you run ``hg histedit --outgoing`` on the clone then it is the same
145 as running ``hg histedit 836302820282``. If you need plan to push to a
145 as running ``hg histedit 836302820282``. If you need plan to push to a
146 repository that Mercurial does not detect to be related to the source
146 repository that Mercurial does not detect to be related to the source
147 repo, you can add a ``--force`` option.
147 repo, you can add a ``--force`` option.
148
148
149 Config
149 Config
150 ------
150 ------
151
151
152 Histedit rule lines are truncated to 80 characters by default. You
152 Histedit rule lines are truncated to 80 characters by default. You
153 can customize this behavior by setting a different length in your
153 can customize this behavior by setting a different length in your
154 configuration file::
154 configuration file::
155
155
156 [histedit]
156 [histedit]
157 linelen = 120 # truncate rule lines at 120 characters
157 linelen = 120 # truncate rule lines at 120 characters
158
158
159 The summary of a change can be customized as well::
159 The summary of a change can be customized as well::
160
160
161 [histedit]
161 [histedit]
162 summary-template = '{rev} {bookmarks} {desc|firstline}'
162 summary-template = '{rev} {bookmarks} {desc|firstline}'
163
163
164 The customized summary should be kept short enough that rule lines
164 The customized summary should be kept short enough that rule lines
165 will fit in the configured line length. See above if that requires
165 will fit in the configured line length. See above if that requires
166 customization.
166 customization.
167
167
168 ``hg histedit`` attempts to automatically choose an appropriate base
168 ``hg histedit`` attempts to automatically choose an appropriate base
169 revision to use. To change which base revision is used, define a
169 revision to use. To change which base revision is used, define a
170 revset in your configuration file::
170 revset in your configuration file::
171
171
172 [histedit]
172 [histedit]
173 defaultrev = only(.) & draft()
173 defaultrev = only(.) & draft()
174
174
175 By default each edited revision needs to be present in histedit commands.
175 By default each edited revision needs to be present in histedit commands.
176 To remove revision you need to use ``drop`` operation. You can configure
176 To remove revision you need to use ``drop`` operation. You can configure
177 the drop to be implicit for missing commits by adding::
177 the drop to be implicit for missing commits by adding::
178
178
179 [histedit]
179 [histedit]
180 dropmissing = True
180 dropmissing = True
181
181
182 By default, histedit will close the transaction after each action. For
182 By default, histedit will close the transaction after each action. For
183 performance purposes, you can configure histedit to use a single transaction
183 performance purposes, you can configure histedit to use a single transaction
184 across the entire histedit. WARNING: This setting introduces a significant risk
184 across the entire histedit. WARNING: This setting introduces a significant risk
185 of losing the work you've done in a histedit if the histedit aborts
185 of losing the work you've done in a histedit if the histedit aborts
186 unexpectedly::
186 unexpectedly::
187
187
188 [histedit]
188 [histedit]
189 singletransaction = True
189 singletransaction = True
190
190
191 """
191 """
192
192
193 from __future__ import absolute_import
193 from __future__ import absolute_import
194
194
195 # chistedit dependencies that are not available everywhere
195 # chistedit dependencies that are not available everywhere
196 try:
196 try:
197 import fcntl
197 import fcntl
198 import termios
198 import termios
199 except ImportError:
199 except ImportError:
200 fcntl = None
200 fcntl = None
201 termios = None
201 termios = None
202
202
203 import functools
203 import functools
204 import os
204 import os
205 import struct
205 import struct
206
206
207 from mercurial.i18n import _
207 from mercurial.i18n import _
208 from mercurial.pycompat import (
208 from mercurial.pycompat import (
209 getattr,
209 getattr,
210 open,
210 open,
211 )
211 )
212 from mercurial.node import (
212 from mercurial.node import (
213 bin,
213 bin,
214 hex,
214 hex,
215 short,
215 short,
216 )
216 )
217 from mercurial import (
217 from mercurial import (
218 bundle2,
218 bundle2,
219 cmdutil,
219 cmdutil,
220 context,
220 context,
221 copies,
221 copies,
222 destutil,
222 destutil,
223 discovery,
223 discovery,
224 encoding,
224 encoding,
225 error,
225 error,
226 exchange,
226 exchange,
227 extensions,
227 extensions,
228 hg,
228 hg,
229 logcmdutil,
229 logcmdutil,
230 merge as mergemod,
230 merge as mergemod,
231 mergestate as mergestatemod,
231 mergestate as mergestatemod,
232 mergeutil,
232 mergeutil,
233 obsolete,
233 obsolete,
234 pycompat,
234 pycompat,
235 registrar,
235 registrar,
236 repair,
236 repair,
237 rewriteutil,
237 rewriteutil,
238 scmutil,
238 scmutil,
239 state as statemod,
239 state as statemod,
240 util,
240 util,
241 )
241 )
242 from mercurial.utils import (
242 from mercurial.utils import (
243 dateutil,
243 dateutil,
244 stringutil,
244 stringutil,
245 urlutil,
245 )
246 )
246
247
247 pickle = util.pickle
248 pickle = util.pickle
248 cmdtable = {}
249 cmdtable = {}
249 command = registrar.command(cmdtable)
250 command = registrar.command(cmdtable)
250
251
251 configtable = {}
252 configtable = {}
252 configitem = registrar.configitem(configtable)
253 configitem = registrar.configitem(configtable)
253 configitem(
254 configitem(
254 b'experimental',
255 b'experimental',
255 b'histedit.autoverb',
256 b'histedit.autoverb',
256 default=False,
257 default=False,
257 )
258 )
258 configitem(
259 configitem(
259 b'histedit',
260 b'histedit',
260 b'defaultrev',
261 b'defaultrev',
261 default=None,
262 default=None,
262 )
263 )
263 configitem(
264 configitem(
264 b'histedit',
265 b'histedit',
265 b'dropmissing',
266 b'dropmissing',
266 default=False,
267 default=False,
267 )
268 )
268 configitem(
269 configitem(
269 b'histedit',
270 b'histedit',
270 b'linelen',
271 b'linelen',
271 default=80,
272 default=80,
272 )
273 )
273 configitem(
274 configitem(
274 b'histedit',
275 b'histedit',
275 b'singletransaction',
276 b'singletransaction',
276 default=False,
277 default=False,
277 )
278 )
278 configitem(
279 configitem(
279 b'ui',
280 b'ui',
280 b'interface.histedit',
281 b'interface.histedit',
281 default=None,
282 default=None,
282 )
283 )
283 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
284 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
284
285
285 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
286 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
286 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
287 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
287 # be specifying the version(s) of Mercurial they are tested with, or
288 # be specifying the version(s) of Mercurial they are tested with, or
288 # leave the attribute unspecified.
289 # leave the attribute unspecified.
289 testedwith = b'ships-with-hg-core'
290 testedwith = b'ships-with-hg-core'
290
291
291 actiontable = {}
292 actiontable = {}
292 primaryactions = set()
293 primaryactions = set()
293 secondaryactions = set()
294 secondaryactions = set()
294 tertiaryactions = set()
295 tertiaryactions = set()
295 internalactions = set()
296 internalactions = set()
296
297
297
298
298 def geteditcomment(ui, first, last):
299 def geteditcomment(ui, first, last):
299 """construct the editor comment
300 """construct the editor comment
300 The comment includes::
301 The comment includes::
301 - an intro
302 - an intro
302 - sorted primary commands
303 - sorted primary commands
303 - sorted short commands
304 - sorted short commands
304 - sorted long commands
305 - sorted long commands
305 - additional hints
306 - additional hints
306
307
307 Commands are only included once.
308 Commands are only included once.
308 """
309 """
309 intro = _(
310 intro = _(
310 b"""Edit history between %s and %s
311 b"""Edit history between %s and %s
311
312
312 Commits are listed from least to most recent
313 Commits are listed from least to most recent
313
314
314 You can reorder changesets by reordering the lines
315 You can reorder changesets by reordering the lines
315
316
316 Commands:
317 Commands:
317 """
318 """
318 )
319 )
319 actions = []
320 actions = []
320
321
321 def addverb(v):
322 def addverb(v):
322 a = actiontable[v]
323 a = actiontable[v]
323 lines = a.message.split(b"\n")
324 lines = a.message.split(b"\n")
324 if len(a.verbs):
325 if len(a.verbs):
325 v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
326 v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
326 actions.append(b" %s = %s" % (v, lines[0]))
327 actions.append(b" %s = %s" % (v, lines[0]))
327 actions.extend([b' %s'] * (len(lines) - 1))
328 actions.extend([b' %s'] * (len(lines) - 1))
328
329
329 for v in (
330 for v in (
330 sorted(primaryactions)
331 sorted(primaryactions)
331 + sorted(secondaryactions)
332 + sorted(secondaryactions)
332 + sorted(tertiaryactions)
333 + sorted(tertiaryactions)
333 ):
334 ):
334 addverb(v)
335 addverb(v)
335 actions.append(b'')
336 actions.append(b'')
336
337
337 hints = []
338 hints = []
338 if ui.configbool(b'histedit', b'dropmissing'):
339 if ui.configbool(b'histedit', b'dropmissing'):
339 hints.append(
340 hints.append(
340 b"Deleting a changeset from the list "
341 b"Deleting a changeset from the list "
341 b"will DISCARD it from the edited history!"
342 b"will DISCARD it from the edited history!"
342 )
343 )
343
344
344 lines = (intro % (first, last)).split(b'\n') + actions + hints
345 lines = (intro % (first, last)).split(b'\n') + actions + hints
345
346
346 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
347 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
347
348
348
349
349 class histeditstate(object):
350 class histeditstate(object):
350 def __init__(self, repo):
351 def __init__(self, repo):
351 self.repo = repo
352 self.repo = repo
352 self.actions = None
353 self.actions = None
353 self.keep = None
354 self.keep = None
354 self.topmost = None
355 self.topmost = None
355 self.parentctxnode = None
356 self.parentctxnode = None
356 self.lock = None
357 self.lock = None
357 self.wlock = None
358 self.wlock = None
358 self.backupfile = None
359 self.backupfile = None
359 self.stateobj = statemod.cmdstate(repo, b'histedit-state')
360 self.stateobj = statemod.cmdstate(repo, b'histedit-state')
360 self.replacements = []
361 self.replacements = []
361
362
362 def read(self):
363 def read(self):
363 """Load histedit state from disk and set fields appropriately."""
364 """Load histedit state from disk and set fields appropriately."""
364 if not self.stateobj.exists():
365 if not self.stateobj.exists():
365 cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))
366 cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))
366
367
367 data = self._read()
368 data = self._read()
368
369
369 self.parentctxnode = data[b'parentctxnode']
370 self.parentctxnode = data[b'parentctxnode']
370 actions = parserules(data[b'rules'], self)
371 actions = parserules(data[b'rules'], self)
371 self.actions = actions
372 self.actions = actions
372 self.keep = data[b'keep']
373 self.keep = data[b'keep']
373 self.topmost = data[b'topmost']
374 self.topmost = data[b'topmost']
374 self.replacements = data[b'replacements']
375 self.replacements = data[b'replacements']
375 self.backupfile = data[b'backupfile']
376 self.backupfile = data[b'backupfile']
376
377
377 def _read(self):
378 def _read(self):
378 fp = self.repo.vfs.read(b'histedit-state')
379 fp = self.repo.vfs.read(b'histedit-state')
379 if fp.startswith(b'v1\n'):
380 if fp.startswith(b'v1\n'):
380 data = self._load()
381 data = self._load()
381 parentctxnode, rules, keep, topmost, replacements, backupfile = data
382 parentctxnode, rules, keep, topmost, replacements, backupfile = data
382 else:
383 else:
383 data = pickle.loads(fp)
384 data = pickle.loads(fp)
384 parentctxnode, rules, keep, topmost, replacements = data
385 parentctxnode, rules, keep, topmost, replacements = data
385 backupfile = None
386 backupfile = None
386 rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])
387 rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])
387
388
388 return {
389 return {
389 b'parentctxnode': parentctxnode,
390 b'parentctxnode': parentctxnode,
390 b"rules": rules,
391 b"rules": rules,
391 b"keep": keep,
392 b"keep": keep,
392 b"topmost": topmost,
393 b"topmost": topmost,
393 b"replacements": replacements,
394 b"replacements": replacements,
394 b"backupfile": backupfile,
395 b"backupfile": backupfile,
395 }
396 }
396
397
397 def write(self, tr=None):
398 def write(self, tr=None):
398 if tr:
399 if tr:
399 tr.addfilegenerator(
400 tr.addfilegenerator(
400 b'histedit-state',
401 b'histedit-state',
401 (b'histedit-state',),
402 (b'histedit-state',),
402 self._write,
403 self._write,
403 location=b'plain',
404 location=b'plain',
404 )
405 )
405 else:
406 else:
406 with self.repo.vfs(b"histedit-state", b"w") as f:
407 with self.repo.vfs(b"histedit-state", b"w") as f:
407 self._write(f)
408 self._write(f)
408
409
409 def _write(self, fp):
410 def _write(self, fp):
410 fp.write(b'v1\n')
411 fp.write(b'v1\n')
411 fp.write(b'%s\n' % hex(self.parentctxnode))
412 fp.write(b'%s\n' % hex(self.parentctxnode))
412 fp.write(b'%s\n' % hex(self.topmost))
413 fp.write(b'%s\n' % hex(self.topmost))
413 fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
414 fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
414 fp.write(b'%d\n' % len(self.actions))
415 fp.write(b'%d\n' % len(self.actions))
415 for action in self.actions:
416 for action in self.actions:
416 fp.write(b'%s\n' % action.tostate())
417 fp.write(b'%s\n' % action.tostate())
417 fp.write(b'%d\n' % len(self.replacements))
418 fp.write(b'%d\n' % len(self.replacements))
418 for replacement in self.replacements:
419 for replacement in self.replacements:
419 fp.write(
420 fp.write(
420 b'%s%s\n'
421 b'%s%s\n'
421 % (
422 % (
422 hex(replacement[0]),
423 hex(replacement[0]),
423 b''.join(hex(r) for r in replacement[1]),
424 b''.join(hex(r) for r in replacement[1]),
424 )
425 )
425 )
426 )
426 backupfile = self.backupfile
427 backupfile = self.backupfile
427 if not backupfile:
428 if not backupfile:
428 backupfile = b''
429 backupfile = b''
429 fp.write(b'%s\n' % backupfile)
430 fp.write(b'%s\n' % backupfile)
430
431
431 def _load(self):
432 def _load(self):
432 fp = self.repo.vfs(b'histedit-state', b'r')
433 fp = self.repo.vfs(b'histedit-state', b'r')
433 lines = [l[:-1] for l in fp.readlines()]
434 lines = [l[:-1] for l in fp.readlines()]
434
435
435 index = 0
436 index = 0
436 lines[index] # version number
437 lines[index] # version number
437 index += 1
438 index += 1
438
439
439 parentctxnode = bin(lines[index])
440 parentctxnode = bin(lines[index])
440 index += 1
441 index += 1
441
442
442 topmost = bin(lines[index])
443 topmost = bin(lines[index])
443 index += 1
444 index += 1
444
445
445 keep = lines[index] == b'True'
446 keep = lines[index] == b'True'
446 index += 1
447 index += 1
447
448
448 # Rules
449 # Rules
449 rules = []
450 rules = []
450 rulelen = int(lines[index])
451 rulelen = int(lines[index])
451 index += 1
452 index += 1
452 for i in pycompat.xrange(rulelen):
453 for i in pycompat.xrange(rulelen):
453 ruleaction = lines[index]
454 ruleaction = lines[index]
454 index += 1
455 index += 1
455 rule = lines[index]
456 rule = lines[index]
456 index += 1
457 index += 1
457 rules.append((ruleaction, rule))
458 rules.append((ruleaction, rule))
458
459
459 # Replacements
460 # Replacements
460 replacements = []
461 replacements = []
461 replacementlen = int(lines[index])
462 replacementlen = int(lines[index])
462 index += 1
463 index += 1
463 for i in pycompat.xrange(replacementlen):
464 for i in pycompat.xrange(replacementlen):
464 replacement = lines[index]
465 replacement = lines[index]
465 original = bin(replacement[:40])
466 original = bin(replacement[:40])
466 succ = [
467 succ = [
467 bin(replacement[i : i + 40])
468 bin(replacement[i : i + 40])
468 for i in range(40, len(replacement), 40)
469 for i in range(40, len(replacement), 40)
469 ]
470 ]
470 replacements.append((original, succ))
471 replacements.append((original, succ))
471 index += 1
472 index += 1
472
473
473 backupfile = lines[index]
474 backupfile = lines[index]
474 index += 1
475 index += 1
475
476
476 fp.close()
477 fp.close()
477
478
478 return parentctxnode, rules, keep, topmost, replacements, backupfile
479 return parentctxnode, rules, keep, topmost, replacements, backupfile
479
480
480 def clear(self):
481 def clear(self):
481 if self.inprogress():
482 if self.inprogress():
482 self.repo.vfs.unlink(b'histedit-state')
483 self.repo.vfs.unlink(b'histedit-state')
483
484
484 def inprogress(self):
485 def inprogress(self):
485 return self.repo.vfs.exists(b'histedit-state')
486 return self.repo.vfs.exists(b'histedit-state')
486
487
487
488
488 class histeditaction(object):
489 class histeditaction(object):
489 def __init__(self, state, node):
490 def __init__(self, state, node):
490 self.state = state
491 self.state = state
491 self.repo = state.repo
492 self.repo = state.repo
492 self.node = node
493 self.node = node
493
494
494 @classmethod
495 @classmethod
495 def fromrule(cls, state, rule):
496 def fromrule(cls, state, rule):
496 """Parses the given rule, returning an instance of the histeditaction."""
497 """Parses the given rule, returning an instance of the histeditaction."""
497 ruleid = rule.strip().split(b' ', 1)[0]
498 ruleid = rule.strip().split(b' ', 1)[0]
498 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
499 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
499 # Check for validation of rule ids and get the rulehash
500 # Check for validation of rule ids and get the rulehash
500 try:
501 try:
501 rev = bin(ruleid)
502 rev = bin(ruleid)
502 except TypeError:
503 except TypeError:
503 try:
504 try:
504 _ctx = scmutil.revsingle(state.repo, ruleid)
505 _ctx = scmutil.revsingle(state.repo, ruleid)
505 rulehash = _ctx.hex()
506 rulehash = _ctx.hex()
506 rev = bin(rulehash)
507 rev = bin(rulehash)
507 except error.RepoLookupError:
508 except error.RepoLookupError:
508 raise error.ParseError(_(b"invalid changeset %s") % ruleid)
509 raise error.ParseError(_(b"invalid changeset %s") % ruleid)
509 return cls(state, rev)
510 return cls(state, rev)
510
511
511 def verify(self, prev, expected, seen):
512 def verify(self, prev, expected, seen):
512 """ Verifies semantic correctness of the rule"""
513 """ Verifies semantic correctness of the rule"""
513 repo = self.repo
514 repo = self.repo
514 ha = hex(self.node)
515 ha = hex(self.node)
515 self.node = scmutil.resolvehexnodeidprefix(repo, ha)
516 self.node = scmutil.resolvehexnodeidprefix(repo, ha)
516 if self.node is None:
517 if self.node is None:
517 raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
518 raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
518 self._verifynodeconstraints(prev, expected, seen)
519 self._verifynodeconstraints(prev, expected, seen)
519
520
520 def _verifynodeconstraints(self, prev, expected, seen):
521 def _verifynodeconstraints(self, prev, expected, seen):
521 # by default command need a node in the edited list
522 # by default command need a node in the edited list
522 if self.node not in expected:
523 if self.node not in expected:
523 raise error.ParseError(
524 raise error.ParseError(
524 _(b'%s "%s" changeset was not a candidate')
525 _(b'%s "%s" changeset was not a candidate')
525 % (self.verb, short(self.node)),
526 % (self.verb, short(self.node)),
526 hint=_(b'only use listed changesets'),
527 hint=_(b'only use listed changesets'),
527 )
528 )
528 # and only one command per node
529 # and only one command per node
529 if self.node in seen:
530 if self.node in seen:
530 raise error.ParseError(
531 raise error.ParseError(
531 _(b'duplicated command for changeset %s') % short(self.node)
532 _(b'duplicated command for changeset %s') % short(self.node)
532 )
533 )
533
534
534 def torule(self):
535 def torule(self):
535 """build a histedit rule line for an action
536 """build a histedit rule line for an action
536
537
537 by default lines are in the form:
538 by default lines are in the form:
538 <hash> <rev> <summary>
539 <hash> <rev> <summary>
539 """
540 """
540 ctx = self.repo[self.node]
541 ctx = self.repo[self.node]
541 ui = self.repo.ui
542 ui = self.repo.ui
542 # We don't want color codes in the commit message template, so
543 # We don't want color codes in the commit message template, so
543 # disable the label() template function while we render it.
544 # disable the label() template function while we render it.
544 with ui.configoverride(
545 with ui.configoverride(
545 {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
546 {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
546 ):
547 ):
547 summary = cmdutil.rendertemplate(
548 summary = cmdutil.rendertemplate(
548 ctx, ui.config(b'histedit', b'summary-template')
549 ctx, ui.config(b'histedit', b'summary-template')
549 )
550 )
550 # Handle the fact that `''.splitlines() => []`
551 # Handle the fact that `''.splitlines() => []`
551 summary = summary.splitlines()[0] if summary else b''
552 summary = summary.splitlines()[0] if summary else b''
552 line = b'%s %s %s' % (self.verb, ctx, summary)
553 line = b'%s %s %s' % (self.verb, ctx, summary)
553 # trim to 75 columns by default so it's not stupidly wide in my editor
554 # trim to 75 columns by default so it's not stupidly wide in my editor
554 # (the 5 more are left for verb)
555 # (the 5 more are left for verb)
555 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
556 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
556 maxlen = max(maxlen, 22) # avoid truncating hash
557 maxlen = max(maxlen, 22) # avoid truncating hash
557 return stringutil.ellipsis(line, maxlen)
558 return stringutil.ellipsis(line, maxlen)
558
559
559 def tostate(self):
560 def tostate(self):
560 """Print an action in format used by histedit state files
561 """Print an action in format used by histedit state files
561 (the first line is a verb, the remainder is the second)
562 (the first line is a verb, the remainder is the second)
562 """
563 """
563 return b"%s\n%s" % (self.verb, hex(self.node))
564 return b"%s\n%s" % (self.verb, hex(self.node))
564
565
565 def run(self):
566 def run(self):
566 """Runs the action. The default behavior is simply apply the action's
567 """Runs the action. The default behavior is simply apply the action's
567 rulectx onto the current parentctx."""
568 rulectx onto the current parentctx."""
568 self.applychange()
569 self.applychange()
569 self.continuedirty()
570 self.continuedirty()
570 return self.continueclean()
571 return self.continueclean()
571
572
572 def applychange(self):
573 def applychange(self):
573 """Applies the changes from this action's rulectx onto the current
574 """Applies the changes from this action's rulectx onto the current
574 parentctx, but does not commit them."""
575 parentctx, but does not commit them."""
575 repo = self.repo
576 repo = self.repo
576 rulectx = repo[self.node]
577 rulectx = repo[self.node]
577 repo.ui.pushbuffer(error=True, labeled=True)
578 repo.ui.pushbuffer(error=True, labeled=True)
578 hg.update(repo, self.state.parentctxnode, quietempty=True)
579 hg.update(repo, self.state.parentctxnode, quietempty=True)
579 repo.ui.popbuffer()
580 repo.ui.popbuffer()
580 stats = applychanges(repo.ui, repo, rulectx, {})
581 stats = applychanges(repo.ui, repo, rulectx, {})
581 repo.dirstate.setbranch(rulectx.branch())
582 repo.dirstate.setbranch(rulectx.branch())
582 if stats.unresolvedcount:
583 if stats.unresolvedcount:
583 raise error.InterventionRequired(
584 raise error.InterventionRequired(
584 _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)),
585 _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)),
585 hint=_(b'hg histedit --continue to resume'),
586 hint=_(b'hg histedit --continue to resume'),
586 )
587 )
587
588
588 def continuedirty(self):
589 def continuedirty(self):
589 """Continues the action when changes have been applied to the working
590 """Continues the action when changes have been applied to the working
590 copy. The default behavior is to commit the dirty changes."""
591 copy. The default behavior is to commit the dirty changes."""
591 repo = self.repo
592 repo = self.repo
592 rulectx = repo[self.node]
593 rulectx = repo[self.node]
593
594
594 editor = self.commiteditor()
595 editor = self.commiteditor()
595 commit = commitfuncfor(repo, rulectx)
596 commit = commitfuncfor(repo, rulectx)
596 if repo.ui.configbool(b'rewrite', b'update-timestamp'):
597 if repo.ui.configbool(b'rewrite', b'update-timestamp'):
597 date = dateutil.makedate()
598 date = dateutil.makedate()
598 else:
599 else:
599 date = rulectx.date()
600 date = rulectx.date()
600 commit(
601 commit(
601 text=rulectx.description(),
602 text=rulectx.description(),
602 user=rulectx.user(),
603 user=rulectx.user(),
603 date=date,
604 date=date,
604 extra=rulectx.extra(),
605 extra=rulectx.extra(),
605 editor=editor,
606 editor=editor,
606 )
607 )
607
608
608 def commiteditor(self):
609 def commiteditor(self):
609 """The editor to be used to edit the commit message."""
610 """The editor to be used to edit the commit message."""
610 return False
611 return False
611
612
612 def continueclean(self):
613 def continueclean(self):
613 """Continues the action when the working copy is clean. The default
614 """Continues the action when the working copy is clean. The default
614 behavior is to accept the current commit as the new version of the
615 behavior is to accept the current commit as the new version of the
615 rulectx."""
616 rulectx."""
616 ctx = self.repo[b'.']
617 ctx = self.repo[b'.']
617 if ctx.node() == self.state.parentctxnode:
618 if ctx.node() == self.state.parentctxnode:
618 self.repo.ui.warn(
619 self.repo.ui.warn(
619 _(b'%s: skipping changeset (no changes)\n') % short(self.node)
620 _(b'%s: skipping changeset (no changes)\n') % short(self.node)
620 )
621 )
621 return ctx, [(self.node, tuple())]
622 return ctx, [(self.node, tuple())]
622 if ctx.node() == self.node:
623 if ctx.node() == self.node:
623 # Nothing changed
624 # Nothing changed
624 return ctx, []
625 return ctx, []
625 return ctx, [(self.node, (ctx.node(),))]
626 return ctx, [(self.node, (ctx.node(),))]
626
627
627
628
628 def commitfuncfor(repo, src):
629 def commitfuncfor(repo, src):
629 """Build a commit function for the replacement of <src>
630 """Build a commit function for the replacement of <src>
630
631
631 This function ensure we apply the same treatment to all changesets.
632 This function ensure we apply the same treatment to all changesets.
632
633
633 - Add a 'histedit_source' entry in extra.
634 - Add a 'histedit_source' entry in extra.
634
635
635 Note that fold has its own separated logic because its handling is a bit
636 Note that fold has its own separated logic because its handling is a bit
636 different and not easily factored out of the fold method.
637 different and not easily factored out of the fold method.
637 """
638 """
638 phasemin = src.phase()
639 phasemin = src.phase()
639
640
640 def commitfunc(**kwargs):
641 def commitfunc(**kwargs):
641 overrides = {(b'phases', b'new-commit'): phasemin}
642 overrides = {(b'phases', b'new-commit'): phasemin}
642 with repo.ui.configoverride(overrides, b'histedit'):
643 with repo.ui.configoverride(overrides, b'histedit'):
643 extra = kwargs.get('extra', {}).copy()
644 extra = kwargs.get('extra', {}).copy()
644 extra[b'histedit_source'] = src.hex()
645 extra[b'histedit_source'] = src.hex()
645 kwargs['extra'] = extra
646 kwargs['extra'] = extra
646 return repo.commit(**kwargs)
647 return repo.commit(**kwargs)
647
648
648 return commitfunc
649 return commitfunc
649
650
650
651
651 def applychanges(ui, repo, ctx, opts):
652 def applychanges(ui, repo, ctx, opts):
652 """Merge changeset from ctx (only) in the current working directory"""
653 """Merge changeset from ctx (only) in the current working directory"""
653 if ctx.p1().node() == repo.dirstate.p1():
654 if ctx.p1().node() == repo.dirstate.p1():
654 # edits are "in place" we do not need to make any merge,
655 # edits are "in place" we do not need to make any merge,
655 # just applies changes on parent for editing
656 # just applies changes on parent for editing
656 ui.pushbuffer()
657 ui.pushbuffer()
657 cmdutil.revert(ui, repo, ctx, all=True)
658 cmdutil.revert(ui, repo, ctx, all=True)
658 stats = mergemod.updateresult(0, 0, 0, 0)
659 stats = mergemod.updateresult(0, 0, 0, 0)
659 ui.popbuffer()
660 ui.popbuffer()
660 else:
661 else:
661 try:
662 try:
662 # ui.forcemerge is an internal variable, do not document
663 # ui.forcemerge is an internal variable, do not document
663 repo.ui.setconfig(
664 repo.ui.setconfig(
664 b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
665 b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
665 )
666 )
666 stats = mergemod.graft(repo, ctx, labels=[b'local', b'histedit'])
667 stats = mergemod.graft(repo, ctx, labels=[b'local', b'histedit'])
667 finally:
668 finally:
668 repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
669 repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
669 return stats
670 return stats
670
671
671
672
672 def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
673 def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
673 """collapse the set of revisions from first to last as new one.
674 """collapse the set of revisions from first to last as new one.
674
675
675 Expected commit options are:
676 Expected commit options are:
676 - message
677 - message
677 - date
678 - date
678 - username
679 - username
679 Commit message is edited in all cases.
680 Commit message is edited in all cases.
680
681
681 This function works in memory."""
682 This function works in memory."""
682 ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
683 ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
683 if not ctxs:
684 if not ctxs:
684 return None
685 return None
685 for c in ctxs:
686 for c in ctxs:
686 if not c.mutable():
687 if not c.mutable():
687 raise error.ParseError(
688 raise error.ParseError(
688 _(b"cannot fold into public change %s") % short(c.node())
689 _(b"cannot fold into public change %s") % short(c.node())
689 )
690 )
690 base = firstctx.p1()
691 base = firstctx.p1()
691
692
692 # commit a new version of the old changeset, including the update
693 # commit a new version of the old changeset, including the update
693 # collect all files which might be affected
694 # collect all files which might be affected
694 files = set()
695 files = set()
695 for ctx in ctxs:
696 for ctx in ctxs:
696 files.update(ctx.files())
697 files.update(ctx.files())
697
698
698 # Recompute copies (avoid recording a -> b -> a)
699 # Recompute copies (avoid recording a -> b -> a)
699 copied = copies.pathcopies(base, lastctx)
700 copied = copies.pathcopies(base, lastctx)
700
701
701 # prune files which were reverted by the updates
702 # prune files which were reverted by the updates
702 files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
703 files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
703 # commit version of these files as defined by head
704 # commit version of these files as defined by head
704 headmf = lastctx.manifest()
705 headmf = lastctx.manifest()
705
706
706 def filectxfn(repo, ctx, path):
707 def filectxfn(repo, ctx, path):
707 if path in headmf:
708 if path in headmf:
708 fctx = lastctx[path]
709 fctx = lastctx[path]
709 flags = fctx.flags()
710 flags = fctx.flags()
710 mctx = context.memfilectx(
711 mctx = context.memfilectx(
711 repo,
712 repo,
712 ctx,
713 ctx,
713 fctx.path(),
714 fctx.path(),
714 fctx.data(),
715 fctx.data(),
715 islink=b'l' in flags,
716 islink=b'l' in flags,
716 isexec=b'x' in flags,
717 isexec=b'x' in flags,
717 copysource=copied.get(path),
718 copysource=copied.get(path),
718 )
719 )
719 return mctx
720 return mctx
720 return None
721 return None
721
722
722 if commitopts.get(b'message'):
723 if commitopts.get(b'message'):
723 message = commitopts[b'message']
724 message = commitopts[b'message']
724 else:
725 else:
725 message = firstctx.description()
726 message = firstctx.description()
726 user = commitopts.get(b'user')
727 user = commitopts.get(b'user')
727 date = commitopts.get(b'date')
728 date = commitopts.get(b'date')
728 extra = commitopts.get(b'extra')
729 extra = commitopts.get(b'extra')
729
730
730 parents = (firstctx.p1().node(), firstctx.p2().node())
731 parents = (firstctx.p1().node(), firstctx.p2().node())
731 editor = None
732 editor = None
732 if not skipprompt:
733 if not skipprompt:
733 editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
734 editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
734 new = context.memctx(
735 new = context.memctx(
735 repo,
736 repo,
736 parents=parents,
737 parents=parents,
737 text=message,
738 text=message,
738 files=files,
739 files=files,
739 filectxfn=filectxfn,
740 filectxfn=filectxfn,
740 user=user,
741 user=user,
741 date=date,
742 date=date,
742 extra=extra,
743 extra=extra,
743 editor=editor,
744 editor=editor,
744 )
745 )
745 return repo.commitctx(new)
746 return repo.commitctx(new)
746
747
747
748
748 def _isdirtywc(repo):
749 def _isdirtywc(repo):
749 return repo[None].dirty(missing=True)
750 return repo[None].dirty(missing=True)
750
751
751
752
752 def abortdirty():
753 def abortdirty():
753 raise error.Abort(
754 raise error.Abort(
754 _(b'working copy has pending changes'),
755 _(b'working copy has pending changes'),
755 hint=_(
756 hint=_(
756 b'amend, commit, or revert them and run histedit '
757 b'amend, commit, or revert them and run histedit '
757 b'--continue, or abort with histedit --abort'
758 b'--continue, or abort with histedit --abort'
758 ),
759 ),
759 )
760 )
760
761
761
762
762 def action(verbs, message, priority=False, internal=False):
763 def action(verbs, message, priority=False, internal=False):
763 def wrap(cls):
764 def wrap(cls):
764 assert not priority or not internal
765 assert not priority or not internal
765 verb = verbs[0]
766 verb = verbs[0]
766 if priority:
767 if priority:
767 primaryactions.add(verb)
768 primaryactions.add(verb)
768 elif internal:
769 elif internal:
769 internalactions.add(verb)
770 internalactions.add(verb)
770 elif len(verbs) > 1:
771 elif len(verbs) > 1:
771 secondaryactions.add(verb)
772 secondaryactions.add(verb)
772 else:
773 else:
773 tertiaryactions.add(verb)
774 tertiaryactions.add(verb)
774
775
775 cls.verb = verb
776 cls.verb = verb
776 cls.verbs = verbs
777 cls.verbs = verbs
777 cls.message = message
778 cls.message = message
778 for verb in verbs:
779 for verb in verbs:
779 actiontable[verb] = cls
780 actiontable[verb] = cls
780 return cls
781 return cls
781
782
782 return wrap
783 return wrap
783
784
784
785
785 @action([b'pick', b'p'], _(b'use commit'), priority=True)
786 @action([b'pick', b'p'], _(b'use commit'), priority=True)
786 class pick(histeditaction):
787 class pick(histeditaction):
787 def run(self):
788 def run(self):
788 rulectx = self.repo[self.node]
789 rulectx = self.repo[self.node]
789 if rulectx.p1().node() == self.state.parentctxnode:
790 if rulectx.p1().node() == self.state.parentctxnode:
790 self.repo.ui.debug(b'node %s unchanged\n' % short(self.node))
791 self.repo.ui.debug(b'node %s unchanged\n' % short(self.node))
791 return rulectx, []
792 return rulectx, []
792
793
793 return super(pick, self).run()
794 return super(pick, self).run()
794
795
795
796
796 @action(
797 @action(
797 [b'edit', b'e'],
798 [b'edit', b'e'],
798 _(b'use commit, but allow edits before making new commit'),
799 _(b'use commit, but allow edits before making new commit'),
799 priority=True,
800 priority=True,
800 )
801 )
801 class edit(histeditaction):
802 class edit(histeditaction):
802 def run(self):
803 def run(self):
803 repo = self.repo
804 repo = self.repo
804 rulectx = repo[self.node]
805 rulectx = repo[self.node]
805 hg.update(repo, self.state.parentctxnode, quietempty=True)
806 hg.update(repo, self.state.parentctxnode, quietempty=True)
806 applychanges(repo.ui, repo, rulectx, {})
807 applychanges(repo.ui, repo, rulectx, {})
807 hint = _(b'to edit %s, `hg histedit --continue` after making changes')
808 hint = _(b'to edit %s, `hg histedit --continue` after making changes')
808 raise error.InterventionRequired(
809 raise error.InterventionRequired(
809 _(b'Editing (%s), commit as needed now to split the change')
810 _(b'Editing (%s), commit as needed now to split the change')
810 % short(self.node),
811 % short(self.node),
811 hint=hint % short(self.node),
812 hint=hint % short(self.node),
812 )
813 )
813
814
814 def commiteditor(self):
815 def commiteditor(self):
815 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')
816 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')
816
817
817
818
818 @action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
819 @action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
819 class fold(histeditaction):
820 class fold(histeditaction):
820 def verify(self, prev, expected, seen):
821 def verify(self, prev, expected, seen):
821 """ Verifies semantic correctness of the fold rule"""
822 """ Verifies semantic correctness of the fold rule"""
822 super(fold, self).verify(prev, expected, seen)
823 super(fold, self).verify(prev, expected, seen)
823 repo = self.repo
824 repo = self.repo
824 if not prev:
825 if not prev:
825 c = repo[self.node].p1()
826 c = repo[self.node].p1()
826 elif not prev.verb in (b'pick', b'base'):
827 elif not prev.verb in (b'pick', b'base'):
827 return
828 return
828 else:
829 else:
829 c = repo[prev.node]
830 c = repo[prev.node]
830 if not c.mutable():
831 if not c.mutable():
831 raise error.ParseError(
832 raise error.ParseError(
832 _(b"cannot fold into public change %s") % short(c.node())
833 _(b"cannot fold into public change %s") % short(c.node())
833 )
834 )
834
835
835 def continuedirty(self):
836 def continuedirty(self):
836 repo = self.repo
837 repo = self.repo
837 rulectx = repo[self.node]
838 rulectx = repo[self.node]
838
839
839 commit = commitfuncfor(repo, rulectx)
840 commit = commitfuncfor(repo, rulectx)
840 commit(
841 commit(
841 text=b'fold-temp-revision %s' % short(self.node),
842 text=b'fold-temp-revision %s' % short(self.node),
842 user=rulectx.user(),
843 user=rulectx.user(),
843 date=rulectx.date(),
844 date=rulectx.date(),
844 extra=rulectx.extra(),
845 extra=rulectx.extra(),
845 )
846 )
846
847
847 def continueclean(self):
848 def continueclean(self):
848 repo = self.repo
849 repo = self.repo
849 ctx = repo[b'.']
850 ctx = repo[b'.']
850 rulectx = repo[self.node]
851 rulectx = repo[self.node]
851 parentctxnode = self.state.parentctxnode
852 parentctxnode = self.state.parentctxnode
852 if ctx.node() == parentctxnode:
853 if ctx.node() == parentctxnode:
853 repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node))
854 repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node))
854 return ctx, [(self.node, (parentctxnode,))]
855 return ctx, [(self.node, (parentctxnode,))]
855
856
856 parentctx = repo[parentctxnode]
857 parentctx = repo[parentctxnode]
857 newcommits = {
858 newcommits = {
858 c.node()
859 c.node()
859 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
860 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
860 }
861 }
861 if not newcommits:
862 if not newcommits:
862 repo.ui.warn(
863 repo.ui.warn(
863 _(
864 _(
864 b'%s: cannot fold - working copy is not a '
865 b'%s: cannot fold - working copy is not a '
865 b'descendant of previous commit %s\n'
866 b'descendant of previous commit %s\n'
866 )
867 )
867 % (short(self.node), short(parentctxnode))
868 % (short(self.node), short(parentctxnode))
868 )
869 )
869 return ctx, [(self.node, (ctx.node(),))]
870 return ctx, [(self.node, (ctx.node(),))]
870
871
871 middlecommits = newcommits.copy()
872 middlecommits = newcommits.copy()
872 middlecommits.discard(ctx.node())
873 middlecommits.discard(ctx.node())
873
874
874 return self.finishfold(
875 return self.finishfold(
875 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
876 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
876 )
877 )
877
878
878 def skipprompt(self):
879 def skipprompt(self):
879 """Returns true if the rule should skip the message editor.
880 """Returns true if the rule should skip the message editor.
880
881
881 For example, 'fold' wants to show an editor, but 'rollup'
882 For example, 'fold' wants to show an editor, but 'rollup'
882 doesn't want to.
883 doesn't want to.
883 """
884 """
884 return False
885 return False
885
886
886 def mergedescs(self):
887 def mergedescs(self):
887 """Returns true if the rule should merge messages of multiple changes.
888 """Returns true if the rule should merge messages of multiple changes.
888
889
889 This exists mainly so that 'rollup' rules can be a subclass of
890 This exists mainly so that 'rollup' rules can be a subclass of
890 'fold'.
891 'fold'.
891 """
892 """
892 return True
893 return True
893
894
894 def firstdate(self):
895 def firstdate(self):
895 """Returns true if the rule should preserve the date of the first
896 """Returns true if the rule should preserve the date of the first
896 change.
897 change.
897
898
898 This exists mainly so that 'rollup' rules can be a subclass of
899 This exists mainly so that 'rollup' rules can be a subclass of
899 'fold'.
900 'fold'.
900 """
901 """
901 return False
902 return False
902
903
903 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
904 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
904 mergemod.update(ctx.p1())
905 mergemod.update(ctx.p1())
905 ### prepare new commit data
906 ### prepare new commit data
906 commitopts = {}
907 commitopts = {}
907 commitopts[b'user'] = ctx.user()
908 commitopts[b'user'] = ctx.user()
908 # commit message
909 # commit message
909 if not self.mergedescs():
910 if not self.mergedescs():
910 newmessage = ctx.description()
911 newmessage = ctx.description()
911 else:
912 else:
912 newmessage = (
913 newmessage = (
913 b'\n***\n'.join(
914 b'\n***\n'.join(
914 [ctx.description()]
915 [ctx.description()]
915 + [repo[r].description() for r in internalchanges]
916 + [repo[r].description() for r in internalchanges]
916 + [oldctx.description()]
917 + [oldctx.description()]
917 )
918 )
918 + b'\n'
919 + b'\n'
919 )
920 )
920 commitopts[b'message'] = newmessage
921 commitopts[b'message'] = newmessage
921 # date
922 # date
922 if self.firstdate():
923 if self.firstdate():
923 commitopts[b'date'] = ctx.date()
924 commitopts[b'date'] = ctx.date()
924 else:
925 else:
925 commitopts[b'date'] = max(ctx.date(), oldctx.date())
926 commitopts[b'date'] = max(ctx.date(), oldctx.date())
926 # if date is to be updated to current
927 # if date is to be updated to current
927 if ui.configbool(b'rewrite', b'update-timestamp'):
928 if ui.configbool(b'rewrite', b'update-timestamp'):
928 commitopts[b'date'] = dateutil.makedate()
929 commitopts[b'date'] = dateutil.makedate()
929
930
930 extra = ctx.extra().copy()
931 extra = ctx.extra().copy()
931 # histedit_source
932 # histedit_source
932 # note: ctx is likely a temporary commit but that the best we can do
933 # note: ctx is likely a temporary commit but that the best we can do
933 # here. This is sufficient to solve issue3681 anyway.
934 # here. This is sufficient to solve issue3681 anyway.
934 extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
935 extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
935 commitopts[b'extra'] = extra
936 commitopts[b'extra'] = extra
936 phasemin = max(ctx.phase(), oldctx.phase())
937 phasemin = max(ctx.phase(), oldctx.phase())
937 overrides = {(b'phases', b'new-commit'): phasemin}
938 overrides = {(b'phases', b'new-commit'): phasemin}
938 with repo.ui.configoverride(overrides, b'histedit'):
939 with repo.ui.configoverride(overrides, b'histedit'):
939 n = collapse(
940 n = collapse(
940 repo,
941 repo,
941 ctx,
942 ctx,
942 repo[newnode],
943 repo[newnode],
943 commitopts,
944 commitopts,
944 skipprompt=self.skipprompt(),
945 skipprompt=self.skipprompt(),
945 )
946 )
946 if n is None:
947 if n is None:
947 return ctx, []
948 return ctx, []
948 mergemod.update(repo[n])
949 mergemod.update(repo[n])
949 replacements = [
950 replacements = [
950 (oldctx.node(), (newnode,)),
951 (oldctx.node(), (newnode,)),
951 (ctx.node(), (n,)),
952 (ctx.node(), (n,)),
952 (newnode, (n,)),
953 (newnode, (n,)),
953 ]
954 ]
954 for ich in internalchanges:
955 for ich in internalchanges:
955 replacements.append((ich, (n,)))
956 replacements.append((ich, (n,)))
956 return repo[n], replacements
957 return repo[n], replacements
957
958
958
959
959 @action(
960 @action(
960 [b'base', b'b'],
961 [b'base', b'b'],
961 _(b'checkout changeset and apply further changesets from there'),
962 _(b'checkout changeset and apply further changesets from there'),
962 )
963 )
963 class base(histeditaction):
964 class base(histeditaction):
964 def run(self):
965 def run(self):
965 if self.repo[b'.'].node() != self.node:
966 if self.repo[b'.'].node() != self.node:
966 mergemod.clean_update(self.repo[self.node])
967 mergemod.clean_update(self.repo[self.node])
967 return self.continueclean()
968 return self.continueclean()
968
969
969 def continuedirty(self):
970 def continuedirty(self):
970 abortdirty()
971 abortdirty()
971
972
972 def continueclean(self):
973 def continueclean(self):
973 basectx = self.repo[b'.']
974 basectx = self.repo[b'.']
974 return basectx, []
975 return basectx, []
975
976
976 def _verifynodeconstraints(self, prev, expected, seen):
977 def _verifynodeconstraints(self, prev, expected, seen):
977 # base can only be use with a node not in the edited set
978 # base can only be use with a node not in the edited set
978 if self.node in expected:
979 if self.node in expected:
979 msg = _(b'%s "%s" changeset was an edited list candidate')
980 msg = _(b'%s "%s" changeset was an edited list candidate')
980 raise error.ParseError(
981 raise error.ParseError(
981 msg % (self.verb, short(self.node)),
982 msg % (self.verb, short(self.node)),
982 hint=_(b'base must only use unlisted changesets'),
983 hint=_(b'base must only use unlisted changesets'),
983 )
984 )
984
985
985
986
986 @action(
987 @action(
987 [b'_multifold'],
988 [b'_multifold'],
988 _(
989 _(
989 """fold subclass used for when multiple folds happen in a row
990 """fold subclass used for when multiple folds happen in a row
990
991
991 We only want to fire the editor for the folded message once when
992 We only want to fire the editor for the folded message once when
992 (say) four changes are folded down into a single change. This is
993 (say) four changes are folded down into a single change. This is
993 similar to rollup, but we should preserve both messages so that
994 similar to rollup, but we should preserve both messages so that
994 when the last fold operation runs we can show the user all the
995 when the last fold operation runs we can show the user all the
995 commit messages in their editor.
996 commit messages in their editor.
996 """
997 """
997 ),
998 ),
998 internal=True,
999 internal=True,
999 )
1000 )
1000 class _multifold(fold):
1001 class _multifold(fold):
1001 def skipprompt(self):
1002 def skipprompt(self):
1002 return True
1003 return True
1003
1004
1004
1005
1005 @action(
1006 @action(
1006 [b"roll", b"r"],
1007 [b"roll", b"r"],
1007 _(b"like fold, but discard this commit's description and date"),
1008 _(b"like fold, but discard this commit's description and date"),
1008 )
1009 )
1009 class rollup(fold):
1010 class rollup(fold):
1010 def mergedescs(self):
1011 def mergedescs(self):
1011 return False
1012 return False
1012
1013
1013 def skipprompt(self):
1014 def skipprompt(self):
1014 return True
1015 return True
1015
1016
1016 def firstdate(self):
1017 def firstdate(self):
1017 return True
1018 return True
1018
1019
1019
1020
1020 @action([b"drop", b"d"], _(b'remove commit from history'))
1021 @action([b"drop", b"d"], _(b'remove commit from history'))
1021 class drop(histeditaction):
1022 class drop(histeditaction):
1022 def run(self):
1023 def run(self):
1023 parentctx = self.repo[self.state.parentctxnode]
1024 parentctx = self.repo[self.state.parentctxnode]
1024 return parentctx, [(self.node, tuple())]
1025 return parentctx, [(self.node, tuple())]
1025
1026
1026
1027
1027 @action(
1028 @action(
1028 [b"mess", b"m"],
1029 [b"mess", b"m"],
1029 _(b'edit commit message without changing commit content'),
1030 _(b'edit commit message without changing commit content'),
1030 priority=True,
1031 priority=True,
1031 )
1032 )
1032 class message(histeditaction):
1033 class message(histeditaction):
1033 def commiteditor(self):
1034 def commiteditor(self):
1034 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1035 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1035
1036
1036
1037
1037 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
1038 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
1038 """utility function to find the first outgoing changeset
1039 """utility function to find the first outgoing changeset
1039
1040
1040 Used by initialization code"""
1041 Used by initialization code"""
1041 if opts is None:
1042 if opts is None:
1042 opts = {}
1043 opts = {}
1043 dest = ui.expandpath(remote or b'default-push', remote or b'default')
1044 dest = ui.expandpath(remote or b'default-push', remote or b'default')
1044 dest, branches = hg.parseurl(dest, None)[:2]
1045 dest, branches = hg.parseurl(dest, None)[:2]
1045 ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
1046 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest))
1046
1047
1047 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1048 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1048 other = hg.peer(repo, opts, dest)
1049 other = hg.peer(repo, opts, dest)
1049
1050
1050 if revs:
1051 if revs:
1051 revs = [repo.lookup(rev) for rev in revs]
1052 revs = [repo.lookup(rev) for rev in revs]
1052
1053
1053 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
1054 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
1054 if not outgoing.missing:
1055 if not outgoing.missing:
1055 raise error.Abort(_(b'no outgoing ancestors'))
1056 raise error.Abort(_(b'no outgoing ancestors'))
1056 roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
1057 roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
1057 if len(roots) > 1:
1058 if len(roots) > 1:
1058 msg = _(b'there are ambiguous outgoing revisions')
1059 msg = _(b'there are ambiguous outgoing revisions')
1059 hint = _(b"see 'hg help histedit' for more detail")
1060 hint = _(b"see 'hg help histedit' for more detail")
1060 raise error.Abort(msg, hint=hint)
1061 raise error.Abort(msg, hint=hint)
1061 return repo[roots[0]].node()
1062 return repo[roots[0]].node()
1062
1063
1063
1064
1064 # Curses Support
1065 # Curses Support
1065 try:
1066 try:
1066 import curses
1067 import curses
1067 except ImportError:
1068 except ImportError:
1068 curses = None
1069 curses = None
1069
1070
1070 KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
1071 KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
1071 ACTION_LABELS = {
1072 ACTION_LABELS = {
1072 b'fold': b'^fold',
1073 b'fold': b'^fold',
1073 b'roll': b'^roll',
1074 b'roll': b'^roll',
1074 }
1075 }
1075
1076
1076 COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
1077 COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
1077 COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
1078 COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
1078 COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11
1079 COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11
1079
1080
1080 E_QUIT, E_HISTEDIT = 1, 2
1081 E_QUIT, E_HISTEDIT = 1, 2
1081 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
1082 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
1082 MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3
1083 MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3
1083
1084
1084 KEYTABLE = {
1085 KEYTABLE = {
1085 b'global': {
1086 b'global': {
1086 b'h': b'next-action',
1087 b'h': b'next-action',
1087 b'KEY_RIGHT': b'next-action',
1088 b'KEY_RIGHT': b'next-action',
1088 b'l': b'prev-action',
1089 b'l': b'prev-action',
1089 b'KEY_LEFT': b'prev-action',
1090 b'KEY_LEFT': b'prev-action',
1090 b'q': b'quit',
1091 b'q': b'quit',
1091 b'c': b'histedit',
1092 b'c': b'histedit',
1092 b'C': b'histedit',
1093 b'C': b'histedit',
1093 b'v': b'showpatch',
1094 b'v': b'showpatch',
1094 b'?': b'help',
1095 b'?': b'help',
1095 },
1096 },
1096 MODE_RULES: {
1097 MODE_RULES: {
1097 b'd': b'action-drop',
1098 b'd': b'action-drop',
1098 b'e': b'action-edit',
1099 b'e': b'action-edit',
1099 b'f': b'action-fold',
1100 b'f': b'action-fold',
1100 b'm': b'action-mess',
1101 b'm': b'action-mess',
1101 b'p': b'action-pick',
1102 b'p': b'action-pick',
1102 b'r': b'action-roll',
1103 b'r': b'action-roll',
1103 b' ': b'select',
1104 b' ': b'select',
1104 b'j': b'down',
1105 b'j': b'down',
1105 b'k': b'up',
1106 b'k': b'up',
1106 b'KEY_DOWN': b'down',
1107 b'KEY_DOWN': b'down',
1107 b'KEY_UP': b'up',
1108 b'KEY_UP': b'up',
1108 b'J': b'move-down',
1109 b'J': b'move-down',
1109 b'K': b'move-up',
1110 b'K': b'move-up',
1110 b'KEY_NPAGE': b'move-down',
1111 b'KEY_NPAGE': b'move-down',
1111 b'KEY_PPAGE': b'move-up',
1112 b'KEY_PPAGE': b'move-up',
1112 b'0': b'goto', # Used for 0..9
1113 b'0': b'goto', # Used for 0..9
1113 },
1114 },
1114 MODE_PATCH: {
1115 MODE_PATCH: {
1115 b' ': b'page-down',
1116 b' ': b'page-down',
1116 b'KEY_NPAGE': b'page-down',
1117 b'KEY_NPAGE': b'page-down',
1117 b'KEY_PPAGE': b'page-up',
1118 b'KEY_PPAGE': b'page-up',
1118 b'j': b'line-down',
1119 b'j': b'line-down',
1119 b'k': b'line-up',
1120 b'k': b'line-up',
1120 b'KEY_DOWN': b'line-down',
1121 b'KEY_DOWN': b'line-down',
1121 b'KEY_UP': b'line-up',
1122 b'KEY_UP': b'line-up',
1122 b'J': b'down',
1123 b'J': b'down',
1123 b'K': b'up',
1124 b'K': b'up',
1124 },
1125 },
1125 MODE_HELP: {},
1126 MODE_HELP: {},
1126 }
1127 }
1127
1128
1128
1129
1129 def screen_size():
1130 def screen_size():
1130 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
1131 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
1131
1132
1132
1133
1133 class histeditrule(object):
1134 class histeditrule(object):
1134 def __init__(self, ui, ctx, pos, action=b'pick'):
1135 def __init__(self, ui, ctx, pos, action=b'pick'):
1135 self.ui = ui
1136 self.ui = ui
1136 self.ctx = ctx
1137 self.ctx = ctx
1137 self.action = action
1138 self.action = action
1138 self.origpos = pos
1139 self.origpos = pos
1139 self.pos = pos
1140 self.pos = pos
1140 self.conflicts = []
1141 self.conflicts = []
1141
1142
1142 def __bytes__(self):
1143 def __bytes__(self):
1143 # Example display of several histeditrules:
1144 # Example display of several histeditrules:
1144 #
1145 #
1145 # #10 pick 316392:06a16c25c053 add option to skip tests
1146 # #10 pick 316392:06a16c25c053 add option to skip tests
1146 # #11 ^roll 316393:71313c964cc5 <RED>oops a fixup commit</RED>
1147 # #11 ^roll 316393:71313c964cc5 <RED>oops a fixup commit</RED>
1147 # #12 pick 316394:ab31f3973b0d include mfbt for mozilla-config.h
1148 # #12 pick 316394:ab31f3973b0d include mfbt for mozilla-config.h
1148 # #13 ^fold 316395:14ce5803f4c3 fix warnings
1149 # #13 ^fold 316395:14ce5803f4c3 fix warnings
1149 #
1150 #
1150 # The carets point to the changeset being folded into ("roll this
1151 # The carets point to the changeset being folded into ("roll this
1151 # changeset into the changeset above").
1152 # changeset into the changeset above").
1152 return b'%s%s' % (self.prefix, self.desc)
1153 return b'%s%s' % (self.prefix, self.desc)
1153
1154
1154 __str__ = encoding.strmethod(__bytes__)
1155 __str__ = encoding.strmethod(__bytes__)
1155
1156
1156 @property
1157 @property
1157 def prefix(self):
1158 def prefix(self):
1158 # Some actions ('fold' and 'roll') combine a patch with a
1159 # Some actions ('fold' and 'roll') combine a patch with a
1159 # previous one. Add a marker showing which patch they apply
1160 # previous one. Add a marker showing which patch they apply
1160 # to.
1161 # to.
1161 action = ACTION_LABELS.get(self.action, self.action)
1162 action = ACTION_LABELS.get(self.action, self.action)
1162
1163
1163 h = self.ctx.hex()[0:12]
1164 h = self.ctx.hex()[0:12]
1164 r = self.ctx.rev()
1165 r = self.ctx.rev()
1165
1166
1166 return b"#%s %s %d:%s " % (
1167 return b"#%s %s %d:%s " % (
1167 (b'%d' % self.origpos).ljust(2),
1168 (b'%d' % self.origpos).ljust(2),
1168 action.ljust(6),
1169 action.ljust(6),
1169 r,
1170 r,
1170 h,
1171 h,
1171 )
1172 )
1172
1173
1173 @util.propertycache
1174 @util.propertycache
1174 def desc(self):
1175 def desc(self):
1175 summary = cmdutil.rendertemplate(
1176 summary = cmdutil.rendertemplate(
1176 self.ctx, self.ui.config(b'histedit', b'summary-template')
1177 self.ctx, self.ui.config(b'histedit', b'summary-template')
1177 )
1178 )
1178 if summary:
1179 if summary:
1179 return summary
1180 return summary
1180 # This is split off from the prefix property so that we can
1181 # This is split off from the prefix property so that we can
1181 # separately make the description for 'roll' red (since it
1182 # separately make the description for 'roll' red (since it
1182 # will get discarded).
1183 # will get discarded).
1183 return self.ctx.description().splitlines()[0].strip()
1184 return self.ctx.description().splitlines()[0].strip()
1184
1185
1185 def checkconflicts(self, other):
1186 def checkconflicts(self, other):
1186 if other.pos > self.pos and other.origpos <= self.origpos:
1187 if other.pos > self.pos and other.origpos <= self.origpos:
1187 if set(other.ctx.files()) & set(self.ctx.files()) != set():
1188 if set(other.ctx.files()) & set(self.ctx.files()) != set():
1188 self.conflicts.append(other)
1189 self.conflicts.append(other)
1189 return self.conflicts
1190 return self.conflicts
1190
1191
1191 if other in self.conflicts:
1192 if other in self.conflicts:
1192 self.conflicts.remove(other)
1193 self.conflicts.remove(other)
1193 return self.conflicts
1194 return self.conflicts
1194
1195
1195
1196
1196 # ============ EVENTS ===============
1197 # ============ EVENTS ===============
1197 def movecursor(state, oldpos, newpos):
1198 def movecursor(state, oldpos, newpos):
1198 """Change the rule/changeset that the cursor is pointing to, regardless of
1199 """Change the rule/changeset that the cursor is pointing to, regardless of
1199 current mode (you can switch between patches from the view patch window)."""
1200 current mode (you can switch between patches from the view patch window)."""
1200 state[b'pos'] = newpos
1201 state[b'pos'] = newpos
1201
1202
1202 mode, _ = state[b'mode']
1203 mode, _ = state[b'mode']
1203 if mode == MODE_RULES:
1204 if mode == MODE_RULES:
1204 # Scroll through the list by updating the view for MODE_RULES, so that
1205 # Scroll through the list by updating the view for MODE_RULES, so that
1205 # even if we are not currently viewing the rules, switching back will
1206 # even if we are not currently viewing the rules, switching back will
1206 # result in the cursor's rule being visible.
1207 # result in the cursor's rule being visible.
1207 modestate = state[b'modes'][MODE_RULES]
1208 modestate = state[b'modes'][MODE_RULES]
1208 if newpos < modestate[b'line_offset']:
1209 if newpos < modestate[b'line_offset']:
1209 modestate[b'line_offset'] = newpos
1210 modestate[b'line_offset'] = newpos
1210 elif newpos > modestate[b'line_offset'] + state[b'page_height'] - 1:
1211 elif newpos > modestate[b'line_offset'] + state[b'page_height'] - 1:
1211 modestate[b'line_offset'] = newpos - state[b'page_height'] + 1
1212 modestate[b'line_offset'] = newpos - state[b'page_height'] + 1
1212
1213
1213 # Reset the patch view region to the top of the new patch.
1214 # Reset the patch view region to the top of the new patch.
1214 state[b'modes'][MODE_PATCH][b'line_offset'] = 0
1215 state[b'modes'][MODE_PATCH][b'line_offset'] = 0
1215
1216
1216
1217
1217 def changemode(state, mode):
1218 def changemode(state, mode):
1218 curmode, _ = state[b'mode']
1219 curmode, _ = state[b'mode']
1219 state[b'mode'] = (mode, curmode)
1220 state[b'mode'] = (mode, curmode)
1220 if mode == MODE_PATCH:
1221 if mode == MODE_PATCH:
1221 state[b'modes'][MODE_PATCH][b'patchcontents'] = patchcontents(state)
1222 state[b'modes'][MODE_PATCH][b'patchcontents'] = patchcontents(state)
1222
1223
1223
1224
1224 def makeselection(state, pos):
1225 def makeselection(state, pos):
1225 state[b'selected'] = pos
1226 state[b'selected'] = pos
1226
1227
1227
1228
1228 def swap(state, oldpos, newpos):
1229 def swap(state, oldpos, newpos):
1229 """Swap two positions and calculate necessary conflicts in
1230 """Swap two positions and calculate necessary conflicts in
1230 O(|newpos-oldpos|) time"""
1231 O(|newpos-oldpos|) time"""
1231
1232
1232 rules = state[b'rules']
1233 rules = state[b'rules']
1233 assert 0 <= oldpos < len(rules) and 0 <= newpos < len(rules)
1234 assert 0 <= oldpos < len(rules) and 0 <= newpos < len(rules)
1234
1235
1235 rules[oldpos], rules[newpos] = rules[newpos], rules[oldpos]
1236 rules[oldpos], rules[newpos] = rules[newpos], rules[oldpos]
1236
1237
1237 # TODO: swap should not know about histeditrule's internals
1238 # TODO: swap should not know about histeditrule's internals
1238 rules[newpos].pos = newpos
1239 rules[newpos].pos = newpos
1239 rules[oldpos].pos = oldpos
1240 rules[oldpos].pos = oldpos
1240
1241
1241 start = min(oldpos, newpos)
1242 start = min(oldpos, newpos)
1242 end = max(oldpos, newpos)
1243 end = max(oldpos, newpos)
1243 for r in pycompat.xrange(start, end + 1):
1244 for r in pycompat.xrange(start, end + 1):
1244 rules[newpos].checkconflicts(rules[r])
1245 rules[newpos].checkconflicts(rules[r])
1245 rules[oldpos].checkconflicts(rules[r])
1246 rules[oldpos].checkconflicts(rules[r])
1246
1247
1247 if state[b'selected']:
1248 if state[b'selected']:
1248 makeselection(state, newpos)
1249 makeselection(state, newpos)
1249
1250
1250
1251
1251 def changeaction(state, pos, action):
1252 def changeaction(state, pos, action):
1252 """Change the action state on the given position to the new action"""
1253 """Change the action state on the given position to the new action"""
1253 rules = state[b'rules']
1254 rules = state[b'rules']
1254 assert 0 <= pos < len(rules)
1255 assert 0 <= pos < len(rules)
1255 rules[pos].action = action
1256 rules[pos].action = action
1256
1257
1257
1258
1258 def cycleaction(state, pos, next=False):
1259 def cycleaction(state, pos, next=False):
1259 """Changes the action state the next or the previous action from
1260 """Changes the action state the next or the previous action from
1260 the action list"""
1261 the action list"""
1261 rules = state[b'rules']
1262 rules = state[b'rules']
1262 assert 0 <= pos < len(rules)
1263 assert 0 <= pos < len(rules)
1263 current = rules[pos].action
1264 current = rules[pos].action
1264
1265
1265 assert current in KEY_LIST
1266 assert current in KEY_LIST
1266
1267
1267 index = KEY_LIST.index(current)
1268 index = KEY_LIST.index(current)
1268 if next:
1269 if next:
1269 index += 1
1270 index += 1
1270 else:
1271 else:
1271 index -= 1
1272 index -= 1
1272 changeaction(state, pos, KEY_LIST[index % len(KEY_LIST)])
1273 changeaction(state, pos, KEY_LIST[index % len(KEY_LIST)])
1273
1274
1274
1275
1275 def changeview(state, delta, unit):
1276 def changeview(state, delta, unit):
1276 """Change the region of whatever is being viewed (a patch or the list of
1277 """Change the region of whatever is being viewed (a patch or the list of
1277 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
1278 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
1278 mode, _ = state[b'mode']
1279 mode, _ = state[b'mode']
1279 if mode != MODE_PATCH:
1280 if mode != MODE_PATCH:
1280 return
1281 return
1281 mode_state = state[b'modes'][mode]
1282 mode_state = state[b'modes'][mode]
1282 num_lines = len(mode_state[b'patchcontents'])
1283 num_lines = len(mode_state[b'patchcontents'])
1283 page_height = state[b'page_height']
1284 page_height = state[b'page_height']
1284 unit = page_height if unit == b'page' else 1
1285 unit = page_height if unit == b'page' else 1
1285 num_pages = 1 + (num_lines - 1) // page_height
1286 num_pages = 1 + (num_lines - 1) // page_height
1286 max_offset = (num_pages - 1) * page_height
1287 max_offset = (num_pages - 1) * page_height
1287 newline = mode_state[b'line_offset'] + delta * unit
1288 newline = mode_state[b'line_offset'] + delta * unit
1288 mode_state[b'line_offset'] = max(0, min(max_offset, newline))
1289 mode_state[b'line_offset'] = max(0, min(max_offset, newline))
1289
1290
1290
1291
1291 def event(state, ch):
1292 def event(state, ch):
1292 """Change state based on the current character input
1293 """Change state based on the current character input
1293
1294
1294 This takes the current state and based on the current character input from
1295 This takes the current state and based on the current character input from
1295 the user we change the state.
1296 the user we change the state.
1296 """
1297 """
1297 selected = state[b'selected']
1298 selected = state[b'selected']
1298 oldpos = state[b'pos']
1299 oldpos = state[b'pos']
1299 rules = state[b'rules']
1300 rules = state[b'rules']
1300
1301
1301 if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
1302 if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
1302 return E_RESIZE
1303 return E_RESIZE
1303
1304
1304 lookup_ch = ch
1305 lookup_ch = ch
1305 if ch is not None and b'0' <= ch <= b'9':
1306 if ch is not None and b'0' <= ch <= b'9':
1306 lookup_ch = b'0'
1307 lookup_ch = b'0'
1307
1308
1308 curmode, prevmode = state[b'mode']
1309 curmode, prevmode = state[b'mode']
1309 action = KEYTABLE[curmode].get(
1310 action = KEYTABLE[curmode].get(
1310 lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
1311 lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
1311 )
1312 )
1312 if action is None:
1313 if action is None:
1313 return
1314 return
1314 if action in (b'down', b'move-down'):
1315 if action in (b'down', b'move-down'):
1315 newpos = min(oldpos + 1, len(rules) - 1)
1316 newpos = min(oldpos + 1, len(rules) - 1)
1316 movecursor(state, oldpos, newpos)
1317 movecursor(state, oldpos, newpos)
1317 if selected is not None or action == b'move-down':
1318 if selected is not None or action == b'move-down':
1318 swap(state, oldpos, newpos)
1319 swap(state, oldpos, newpos)
1319 elif action in (b'up', b'move-up'):
1320 elif action in (b'up', b'move-up'):
1320 newpos = max(0, oldpos - 1)
1321 newpos = max(0, oldpos - 1)
1321 movecursor(state, oldpos, newpos)
1322 movecursor(state, oldpos, newpos)
1322 if selected is not None or action == b'move-up':
1323 if selected is not None or action == b'move-up':
1323 swap(state, oldpos, newpos)
1324 swap(state, oldpos, newpos)
1324 elif action == b'next-action':
1325 elif action == b'next-action':
1325 cycleaction(state, oldpos, next=True)
1326 cycleaction(state, oldpos, next=True)
1326 elif action == b'prev-action':
1327 elif action == b'prev-action':
1327 cycleaction(state, oldpos, next=False)
1328 cycleaction(state, oldpos, next=False)
1328 elif action == b'select':
1329 elif action == b'select':
1329 selected = oldpos if selected is None else None
1330 selected = oldpos if selected is None else None
1330 makeselection(state, selected)
1331 makeselection(state, selected)
1331 elif action == b'goto' and int(ch) < len(rules) and len(rules) <= 10:
1332 elif action == b'goto' and int(ch) < len(rules) and len(rules) <= 10:
1332 newrule = next((r for r in rules if r.origpos == int(ch)))
1333 newrule = next((r for r in rules if r.origpos == int(ch)))
1333 movecursor(state, oldpos, newrule.pos)
1334 movecursor(state, oldpos, newrule.pos)
1334 if selected is not None:
1335 if selected is not None:
1335 swap(state, oldpos, newrule.pos)
1336 swap(state, oldpos, newrule.pos)
1336 elif action.startswith(b'action-'):
1337 elif action.startswith(b'action-'):
1337 changeaction(state, oldpos, action[7:])
1338 changeaction(state, oldpos, action[7:])
1338 elif action == b'showpatch':
1339 elif action == b'showpatch':
1339 changemode(state, MODE_PATCH if curmode != MODE_PATCH else prevmode)
1340 changemode(state, MODE_PATCH if curmode != MODE_PATCH else prevmode)
1340 elif action == b'help':
1341 elif action == b'help':
1341 changemode(state, MODE_HELP if curmode != MODE_HELP else prevmode)
1342 changemode(state, MODE_HELP if curmode != MODE_HELP else prevmode)
1342 elif action == b'quit':
1343 elif action == b'quit':
1343 return E_QUIT
1344 return E_QUIT
1344 elif action == b'histedit':
1345 elif action == b'histedit':
1345 return E_HISTEDIT
1346 return E_HISTEDIT
1346 elif action == b'page-down':
1347 elif action == b'page-down':
1347 return E_PAGEDOWN
1348 return E_PAGEDOWN
1348 elif action == b'page-up':
1349 elif action == b'page-up':
1349 return E_PAGEUP
1350 return E_PAGEUP
1350 elif action == b'line-down':
1351 elif action == b'line-down':
1351 return E_LINEDOWN
1352 return E_LINEDOWN
1352 elif action == b'line-up':
1353 elif action == b'line-up':
1353 return E_LINEUP
1354 return E_LINEUP
1354
1355
1355
1356
1356 def makecommands(rules):
1357 def makecommands(rules):
1357 """Returns a list of commands consumable by histedit --commands based on
1358 """Returns a list of commands consumable by histedit --commands based on
1358 our list of rules"""
1359 our list of rules"""
1359 commands = []
1360 commands = []
1360 for rules in rules:
1361 for rules in rules:
1361 commands.append(b'%s %s\n' % (rules.action, rules.ctx))
1362 commands.append(b'%s %s\n' % (rules.action, rules.ctx))
1362 return commands
1363 return commands
1363
1364
1364
1365
1365 def addln(win, y, x, line, color=None):
1366 def addln(win, y, x, line, color=None):
1366 """Add a line to the given window left padding but 100% filled with
1367 """Add a line to the given window left padding but 100% filled with
1367 whitespace characters, so that the color appears on the whole line"""
1368 whitespace characters, so that the color appears on the whole line"""
1368 maxy, maxx = win.getmaxyx()
1369 maxy, maxx = win.getmaxyx()
1369 length = maxx - 1 - x
1370 length = maxx - 1 - x
1370 line = bytes(line).ljust(length)[:length]
1371 line = bytes(line).ljust(length)[:length]
1371 if y < 0:
1372 if y < 0:
1372 y = maxy + y
1373 y = maxy + y
1373 if x < 0:
1374 if x < 0:
1374 x = maxx + x
1375 x = maxx + x
1375 if color:
1376 if color:
1376 win.addstr(y, x, line, color)
1377 win.addstr(y, x, line, color)
1377 else:
1378 else:
1378 win.addstr(y, x, line)
1379 win.addstr(y, x, line)
1379
1380
1380
1381
1381 def _trunc_head(line, n):
1382 def _trunc_head(line, n):
1382 if len(line) <= n:
1383 if len(line) <= n:
1383 return line
1384 return line
1384 return b'> ' + line[-(n - 2) :]
1385 return b'> ' + line[-(n - 2) :]
1385
1386
1386
1387
1387 def _trunc_tail(line, n):
1388 def _trunc_tail(line, n):
1388 if len(line) <= n:
1389 if len(line) <= n:
1389 return line
1390 return line
1390 return line[: n - 2] + b' >'
1391 return line[: n - 2] + b' >'
1391
1392
1392
1393
1393 def patchcontents(state):
1394 def patchcontents(state):
1394 repo = state[b'repo']
1395 repo = state[b'repo']
1395 rule = state[b'rules'][state[b'pos']]
1396 rule = state[b'rules'][state[b'pos']]
1396 displayer = logcmdutil.changesetdisplayer(
1397 displayer = logcmdutil.changesetdisplayer(
1397 repo.ui, repo, {b"patch": True, b"template": b"status"}, buffered=True
1398 repo.ui, repo, {b"patch": True, b"template": b"status"}, buffered=True
1398 )
1399 )
1399 overrides = {(b'ui', b'verbose'): True}
1400 overrides = {(b'ui', b'verbose'): True}
1400 with repo.ui.configoverride(overrides, source=b'histedit'):
1401 with repo.ui.configoverride(overrides, source=b'histedit'):
1401 displayer.show(rule.ctx)
1402 displayer.show(rule.ctx)
1402 displayer.close()
1403 displayer.close()
1403 return displayer.hunk[rule.ctx.rev()].splitlines()
1404 return displayer.hunk[rule.ctx.rev()].splitlines()
1404
1405
1405
1406
1406 def _chisteditmain(repo, rules, stdscr):
1407 def _chisteditmain(repo, rules, stdscr):
1407 try:
1408 try:
1408 curses.use_default_colors()
1409 curses.use_default_colors()
1409 except curses.error:
1410 except curses.error:
1410 pass
1411 pass
1411
1412
1412 # initialize color pattern
1413 # initialize color pattern
1413 curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
1414 curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
1414 curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
1415 curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
1415 curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
1416 curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
1416 curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
1417 curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
1417 curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
1418 curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
1418 curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
1419 curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
1419 curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
1420 curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
1420 curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
1421 curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
1421 curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
1422 curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
1422 curses.init_pair(
1423 curses.init_pair(
1423 COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
1424 COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
1424 )
1425 )
1425 curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)
1426 curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)
1426
1427
1427 # don't display the cursor
1428 # don't display the cursor
1428 try:
1429 try:
1429 curses.curs_set(0)
1430 curses.curs_set(0)
1430 except curses.error:
1431 except curses.error:
1431 pass
1432 pass
1432
1433
1433 def rendercommit(win, state):
1434 def rendercommit(win, state):
1434 """Renders the commit window that shows the log of the current selected
1435 """Renders the commit window that shows the log of the current selected
1435 commit"""
1436 commit"""
1436 pos = state[b'pos']
1437 pos = state[b'pos']
1437 rules = state[b'rules']
1438 rules = state[b'rules']
1438 rule = rules[pos]
1439 rule = rules[pos]
1439
1440
1440 ctx = rule.ctx
1441 ctx = rule.ctx
1441 win.box()
1442 win.box()
1442
1443
1443 maxy, maxx = win.getmaxyx()
1444 maxy, maxx = win.getmaxyx()
1444 length = maxx - 3
1445 length = maxx - 3
1445
1446
1446 line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
1447 line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
1447 win.addstr(1, 1, line[:length])
1448 win.addstr(1, 1, line[:length])
1448
1449
1449 line = b"user: %s" % ctx.user()
1450 line = b"user: %s" % ctx.user()
1450 win.addstr(2, 1, line[:length])
1451 win.addstr(2, 1, line[:length])
1451
1452
1452 bms = repo.nodebookmarks(ctx.node())
1453 bms = repo.nodebookmarks(ctx.node())
1453 line = b"bookmark: %s" % b' '.join(bms)
1454 line = b"bookmark: %s" % b' '.join(bms)
1454 win.addstr(3, 1, line[:length])
1455 win.addstr(3, 1, line[:length])
1455
1456
1456 line = b"summary: %s" % (ctx.description().splitlines()[0])
1457 line = b"summary: %s" % (ctx.description().splitlines()[0])
1457 win.addstr(4, 1, line[:length])
1458 win.addstr(4, 1, line[:length])
1458
1459
1459 line = b"files: "
1460 line = b"files: "
1460 win.addstr(5, 1, line)
1461 win.addstr(5, 1, line)
1461 fnx = 1 + len(line)
1462 fnx = 1 + len(line)
1462 fnmaxx = length - fnx + 1
1463 fnmaxx = length - fnx + 1
1463 y = 5
1464 y = 5
1464 fnmaxn = maxy - (1 + y) - 1
1465 fnmaxn = maxy - (1 + y) - 1
1465 files = ctx.files()
1466 files = ctx.files()
1466 for i, line1 in enumerate(files):
1467 for i, line1 in enumerate(files):
1467 if len(files) > fnmaxn and i == fnmaxn - 1:
1468 if len(files) > fnmaxn and i == fnmaxn - 1:
1468 win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
1469 win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
1469 y = y + 1
1470 y = y + 1
1470 break
1471 break
1471 win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
1472 win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
1472 y = y + 1
1473 y = y + 1
1473
1474
1474 conflicts = rule.conflicts
1475 conflicts = rule.conflicts
1475 if len(conflicts) > 0:
1476 if len(conflicts) > 0:
1476 conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
1477 conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
1477 conflictstr = b"changed files overlap with %s" % conflictstr
1478 conflictstr = b"changed files overlap with %s" % conflictstr
1478 else:
1479 else:
1479 conflictstr = b'no overlap'
1480 conflictstr = b'no overlap'
1480
1481
1481 win.addstr(y, 1, conflictstr[:length])
1482 win.addstr(y, 1, conflictstr[:length])
1482 win.noutrefresh()
1483 win.noutrefresh()
1483
1484
1484 def helplines(mode):
1485 def helplines(mode):
1485 if mode == MODE_PATCH:
1486 if mode == MODE_PATCH:
1486 help = b"""\
1487 help = b"""\
1487 ?: help, k/up: line up, j/down: line down, v: stop viewing patch
1488 ?: help, k/up: line up, j/down: line down, v: stop viewing patch
1488 pgup: prev page, space/pgdn: next page, c: commit, q: abort
1489 pgup: prev page, space/pgdn: next page, c: commit, q: abort
1489 """
1490 """
1490 else:
1491 else:
1491 help = b"""\
1492 help = b"""\
1492 ?: help, k/up: move up, j/down: move down, space: select, v: view patch
1493 ?: help, k/up: move up, j/down: move down, space: select, v: view patch
1493 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
1494 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
1494 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
1495 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
1495 """
1496 """
1496 return help.splitlines()
1497 return help.splitlines()
1497
1498
1498 def renderhelp(win, state):
1499 def renderhelp(win, state):
1499 maxy, maxx = win.getmaxyx()
1500 maxy, maxx = win.getmaxyx()
1500 mode, _ = state[b'mode']
1501 mode, _ = state[b'mode']
1501 for y, line in enumerate(helplines(mode)):
1502 for y, line in enumerate(helplines(mode)):
1502 if y >= maxy:
1503 if y >= maxy:
1503 break
1504 break
1504 addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
1505 addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
1505 win.noutrefresh()
1506 win.noutrefresh()
1506
1507
1507 def renderrules(rulesscr, state):
1508 def renderrules(rulesscr, state):
1508 rules = state[b'rules']
1509 rules = state[b'rules']
1509 pos = state[b'pos']
1510 pos = state[b'pos']
1510 selected = state[b'selected']
1511 selected = state[b'selected']
1511 start = state[b'modes'][MODE_RULES][b'line_offset']
1512 start = state[b'modes'][MODE_RULES][b'line_offset']
1512
1513
1513 conflicts = [r.ctx for r in rules if r.conflicts]
1514 conflicts = [r.ctx for r in rules if r.conflicts]
1514 if len(conflicts) > 0:
1515 if len(conflicts) > 0:
1515 line = b"potential conflict in %s" % b','.join(
1516 line = b"potential conflict in %s" % b','.join(
1516 map(pycompat.bytestr, conflicts)
1517 map(pycompat.bytestr, conflicts)
1517 )
1518 )
1518 addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))
1519 addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))
1519
1520
1520 for y, rule in enumerate(rules[start:]):
1521 for y, rule in enumerate(rules[start:]):
1521 if y >= state[b'page_height']:
1522 if y >= state[b'page_height']:
1522 break
1523 break
1523 if len(rule.conflicts) > 0:
1524 if len(rule.conflicts) > 0:
1524 rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
1525 rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
1525 else:
1526 else:
1526 rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)
1527 rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)
1527
1528
1528 if y + start == selected:
1529 if y + start == selected:
1529 rollcolor = COLOR_ROLL_SELECTED
1530 rollcolor = COLOR_ROLL_SELECTED
1530 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
1531 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
1531 elif y + start == pos:
1532 elif y + start == pos:
1532 rollcolor = COLOR_ROLL_CURRENT
1533 rollcolor = COLOR_ROLL_CURRENT
1533 addln(
1534 addln(
1534 rulesscr,
1535 rulesscr,
1535 y,
1536 y,
1536 2,
1537 2,
1537 rule,
1538 rule,
1538 curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
1539 curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
1539 )
1540 )
1540 else:
1541 else:
1541 rollcolor = COLOR_ROLL
1542 rollcolor = COLOR_ROLL
1542 addln(rulesscr, y, 2, rule)
1543 addln(rulesscr, y, 2, rule)
1543
1544
1544 if rule.action == b'roll':
1545 if rule.action == b'roll':
1545 rulesscr.addstr(
1546 rulesscr.addstr(
1546 y,
1547 y,
1547 2 + len(rule.prefix),
1548 2 + len(rule.prefix),
1548 rule.desc,
1549 rule.desc,
1549 curses.color_pair(rollcolor),
1550 curses.color_pair(rollcolor),
1550 )
1551 )
1551
1552
1552 rulesscr.noutrefresh()
1553 rulesscr.noutrefresh()
1553
1554
1554 def renderstring(win, state, output, diffcolors=False):
1555 def renderstring(win, state, output, diffcolors=False):
1555 maxy, maxx = win.getmaxyx()
1556 maxy, maxx = win.getmaxyx()
1556 length = min(maxy - 1, len(output))
1557 length = min(maxy - 1, len(output))
1557 for y in range(0, length):
1558 for y in range(0, length):
1558 line = output[y]
1559 line = output[y]
1559 if diffcolors:
1560 if diffcolors:
1560 if line and line[0] == b'+':
1561 if line and line[0] == b'+':
1561 win.addstr(
1562 win.addstr(
1562 y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
1563 y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
1563 )
1564 )
1564 elif line and line[0] == b'-':
1565 elif line and line[0] == b'-':
1565 win.addstr(
1566 win.addstr(
1566 y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
1567 y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
1567 )
1568 )
1568 elif line.startswith(b'@@ '):
1569 elif line.startswith(b'@@ '):
1569 win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
1570 win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
1570 else:
1571 else:
1571 win.addstr(y, 0, line)
1572 win.addstr(y, 0, line)
1572 else:
1573 else:
1573 win.addstr(y, 0, line)
1574 win.addstr(y, 0, line)
1574 win.noutrefresh()
1575 win.noutrefresh()
1575
1576
1576 def renderpatch(win, state):
1577 def renderpatch(win, state):
1577 start = state[b'modes'][MODE_PATCH][b'line_offset']
1578 start = state[b'modes'][MODE_PATCH][b'line_offset']
1578 content = state[b'modes'][MODE_PATCH][b'patchcontents']
1579 content = state[b'modes'][MODE_PATCH][b'patchcontents']
1579 renderstring(win, state, content[start:], diffcolors=True)
1580 renderstring(win, state, content[start:], diffcolors=True)
1580
1581
1581 def layout(mode):
1582 def layout(mode):
1582 maxy, maxx = stdscr.getmaxyx()
1583 maxy, maxx = stdscr.getmaxyx()
1583 helplen = len(helplines(mode))
1584 helplen = len(helplines(mode))
1584 mainlen = maxy - helplen - 12
1585 mainlen = maxy - helplen - 12
1585 if mainlen < 1:
1586 if mainlen < 1:
1586 raise error.Abort(
1587 raise error.Abort(
1587 _(b"terminal dimensions %d by %d too small for curses histedit")
1588 _(b"terminal dimensions %d by %d too small for curses histedit")
1588 % (maxy, maxx),
1589 % (maxy, maxx),
1589 hint=_(
1590 hint=_(
1590 b"enlarge your terminal or use --config ui.interface=text"
1591 b"enlarge your terminal or use --config ui.interface=text"
1591 ),
1592 ),
1592 )
1593 )
1593 return {
1594 return {
1594 b'commit': (12, maxx),
1595 b'commit': (12, maxx),
1595 b'help': (helplen, maxx),
1596 b'help': (helplen, maxx),
1596 b'main': (mainlen, maxx),
1597 b'main': (mainlen, maxx),
1597 }
1598 }
1598
1599
1599 def drawvertwin(size, y, x):
1600 def drawvertwin(size, y, x):
1600 win = curses.newwin(size[0], size[1], y, x)
1601 win = curses.newwin(size[0], size[1], y, x)
1601 y += size[0]
1602 y += size[0]
1602 return win, y, x
1603 return win, y, x
1603
1604
1604 state = {
1605 state = {
1605 b'pos': 0,
1606 b'pos': 0,
1606 b'rules': rules,
1607 b'rules': rules,
1607 b'selected': None,
1608 b'selected': None,
1608 b'mode': (MODE_INIT, MODE_INIT),
1609 b'mode': (MODE_INIT, MODE_INIT),
1609 b'page_height': None,
1610 b'page_height': None,
1610 b'modes': {
1611 b'modes': {
1611 MODE_RULES: {
1612 MODE_RULES: {
1612 b'line_offset': 0,
1613 b'line_offset': 0,
1613 },
1614 },
1614 MODE_PATCH: {
1615 MODE_PATCH: {
1615 b'line_offset': 0,
1616 b'line_offset': 0,
1616 },
1617 },
1617 },
1618 },
1618 b'repo': repo,
1619 b'repo': repo,
1619 }
1620 }
1620
1621
1621 # eventloop
1622 # eventloop
1622 ch = None
1623 ch = None
1623 stdscr.clear()
1624 stdscr.clear()
1624 stdscr.refresh()
1625 stdscr.refresh()
1625 while True:
1626 while True:
1626 oldmode, unused = state[b'mode']
1627 oldmode, unused = state[b'mode']
1627 if oldmode == MODE_INIT:
1628 if oldmode == MODE_INIT:
1628 changemode(state, MODE_RULES)
1629 changemode(state, MODE_RULES)
1629 e = event(state, ch)
1630 e = event(state, ch)
1630
1631
1631 if e == E_QUIT:
1632 if e == E_QUIT:
1632 return False
1633 return False
1633 if e == E_HISTEDIT:
1634 if e == E_HISTEDIT:
1634 return state[b'rules']
1635 return state[b'rules']
1635 else:
1636 else:
1636 if e == E_RESIZE:
1637 if e == E_RESIZE:
1637 size = screen_size()
1638 size = screen_size()
1638 if size != stdscr.getmaxyx():
1639 if size != stdscr.getmaxyx():
1639 curses.resizeterm(*size)
1640 curses.resizeterm(*size)
1640
1641
1641 curmode, unused = state[b'mode']
1642 curmode, unused = state[b'mode']
1642 sizes = layout(curmode)
1643 sizes = layout(curmode)
1643 if curmode != oldmode:
1644 if curmode != oldmode:
1644 state[b'page_height'] = sizes[b'main'][0]
1645 state[b'page_height'] = sizes[b'main'][0]
1645 # Adjust the view to fit the current screen size.
1646 # Adjust the view to fit the current screen size.
1646 movecursor(state, state[b'pos'], state[b'pos'])
1647 movecursor(state, state[b'pos'], state[b'pos'])
1647
1648
1648 # Pack the windows against the top, each pane spread across the
1649 # Pack the windows against the top, each pane spread across the
1649 # full width of the screen.
1650 # full width of the screen.
1650 y, x = (0, 0)
1651 y, x = (0, 0)
1651 helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
1652 helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
1652 mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
1653 mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
1653 commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
1654 commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
1654
1655
1655 if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
1656 if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
1656 if e == E_PAGEDOWN:
1657 if e == E_PAGEDOWN:
1657 changeview(state, +1, b'page')
1658 changeview(state, +1, b'page')
1658 elif e == E_PAGEUP:
1659 elif e == E_PAGEUP:
1659 changeview(state, -1, b'page')
1660 changeview(state, -1, b'page')
1660 elif e == E_LINEDOWN:
1661 elif e == E_LINEDOWN:
1661 changeview(state, +1, b'line')
1662 changeview(state, +1, b'line')
1662 elif e == E_LINEUP:
1663 elif e == E_LINEUP:
1663 changeview(state, -1, b'line')
1664 changeview(state, -1, b'line')
1664
1665
1665 # start rendering
1666 # start rendering
1666 commitwin.erase()
1667 commitwin.erase()
1667 helpwin.erase()
1668 helpwin.erase()
1668 mainwin.erase()
1669 mainwin.erase()
1669 if curmode == MODE_PATCH:
1670 if curmode == MODE_PATCH:
1670 renderpatch(mainwin, state)
1671 renderpatch(mainwin, state)
1671 elif curmode == MODE_HELP:
1672 elif curmode == MODE_HELP:
1672 renderstring(mainwin, state, __doc__.strip().splitlines())
1673 renderstring(mainwin, state, __doc__.strip().splitlines())
1673 else:
1674 else:
1674 renderrules(mainwin, state)
1675 renderrules(mainwin, state)
1675 rendercommit(commitwin, state)
1676 rendercommit(commitwin, state)
1676 renderhelp(helpwin, state)
1677 renderhelp(helpwin, state)
1677 curses.doupdate()
1678 curses.doupdate()
1678 # done rendering
1679 # done rendering
1679 ch = encoding.strtolocal(stdscr.getkey())
1680 ch = encoding.strtolocal(stdscr.getkey())
1680
1681
1681
1682
1682 def _chistedit(ui, repo, freeargs, opts):
1683 def _chistedit(ui, repo, freeargs, opts):
1683 """interactively edit changeset history via a curses interface
1684 """interactively edit changeset history via a curses interface
1684
1685
1685 Provides a ncurses interface to histedit. Press ? in chistedit mode
1686 Provides a ncurses interface to histedit. Press ? in chistedit mode
1686 to see an extensive help. Requires python-curses to be installed."""
1687 to see an extensive help. Requires python-curses to be installed."""
1687
1688
1688 if curses is None:
1689 if curses is None:
1689 raise error.Abort(_(b"Python curses library required"))
1690 raise error.Abort(_(b"Python curses library required"))
1690
1691
1691 # disable color
1692 # disable color
1692 ui._colormode = None
1693 ui._colormode = None
1693
1694
1694 try:
1695 try:
1695 keep = opts.get(b'keep')
1696 keep = opts.get(b'keep')
1696 revs = opts.get(b'rev', [])[:]
1697 revs = opts.get(b'rev', [])[:]
1697 cmdutil.checkunfinished(repo)
1698 cmdutil.checkunfinished(repo)
1698 cmdutil.bailifchanged(repo)
1699 cmdutil.bailifchanged(repo)
1699
1700
1700 if os.path.exists(os.path.join(repo.path, b'histedit-state')):
1701 if os.path.exists(os.path.join(repo.path, b'histedit-state')):
1701 raise error.Abort(
1702 raise error.Abort(
1702 _(
1703 _(
1703 b'history edit already in progress, try '
1704 b'history edit already in progress, try '
1704 b'--continue or --abort'
1705 b'--continue or --abort'
1705 )
1706 )
1706 )
1707 )
1707 revs.extend(freeargs)
1708 revs.extend(freeargs)
1708 if not revs:
1709 if not revs:
1709 defaultrev = destutil.desthistedit(ui, repo)
1710 defaultrev = destutil.desthistedit(ui, repo)
1710 if defaultrev is not None:
1711 if defaultrev is not None:
1711 revs.append(defaultrev)
1712 revs.append(defaultrev)
1712 if len(revs) != 1:
1713 if len(revs) != 1:
1713 raise error.Abort(
1714 raise error.Abort(
1714 _(b'histedit requires exactly one ancestor revision')
1715 _(b'histedit requires exactly one ancestor revision')
1715 )
1716 )
1716
1717
1717 rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
1718 rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
1718 if len(rr) != 1:
1719 if len(rr) != 1:
1719 raise error.Abort(
1720 raise error.Abort(
1720 _(
1721 _(
1721 b'The specified revisions must have '
1722 b'The specified revisions must have '
1722 b'exactly one common root'
1723 b'exactly one common root'
1723 )
1724 )
1724 )
1725 )
1725 root = rr[0].node()
1726 root = rr[0].node()
1726
1727
1727 topmost = repo.dirstate.p1()
1728 topmost = repo.dirstate.p1()
1728 revs = between(repo, root, topmost, keep)
1729 revs = between(repo, root, topmost, keep)
1729 if not revs:
1730 if not revs:
1730 raise error.Abort(
1731 raise error.Abort(
1731 _(b'%s is not an ancestor of working directory') % short(root)
1732 _(b'%s is not an ancestor of working directory') % short(root)
1732 )
1733 )
1733
1734
1734 ctxs = []
1735 ctxs = []
1735 for i, r in enumerate(revs):
1736 for i, r in enumerate(revs):
1736 ctxs.append(histeditrule(ui, repo[r], i))
1737 ctxs.append(histeditrule(ui, repo[r], i))
1737 with util.with_lc_ctype():
1738 with util.with_lc_ctype():
1738 rc = curses.wrapper(functools.partial(_chisteditmain, repo, ctxs))
1739 rc = curses.wrapper(functools.partial(_chisteditmain, repo, ctxs))
1739 curses.echo()
1740 curses.echo()
1740 curses.endwin()
1741 curses.endwin()
1741 if rc is False:
1742 if rc is False:
1742 ui.write(_(b"histedit aborted\n"))
1743 ui.write(_(b"histedit aborted\n"))
1743 return 0
1744 return 0
1744 if type(rc) is list:
1745 if type(rc) is list:
1745 ui.status(_(b"performing changes\n"))
1746 ui.status(_(b"performing changes\n"))
1746 rules = makecommands(rc)
1747 rules = makecommands(rc)
1747 with repo.vfs(b'chistedit', b'w+') as fp:
1748 with repo.vfs(b'chistedit', b'w+') as fp:
1748 for r in rules:
1749 for r in rules:
1749 fp.write(r)
1750 fp.write(r)
1750 opts[b'commands'] = fp.name
1751 opts[b'commands'] = fp.name
1751 return _texthistedit(ui, repo, freeargs, opts)
1752 return _texthistedit(ui, repo, freeargs, opts)
1752 except KeyboardInterrupt:
1753 except KeyboardInterrupt:
1753 pass
1754 pass
1754 return -1
1755 return -1
1755
1756
1756
1757
1757 @command(
1758 @command(
1758 b'histedit',
1759 b'histedit',
1759 [
1760 [
1760 (
1761 (
1761 b'',
1762 b'',
1762 b'commands',
1763 b'commands',
1763 b'',
1764 b'',
1764 _(b'read history edits from the specified file'),
1765 _(b'read history edits from the specified file'),
1765 _(b'FILE'),
1766 _(b'FILE'),
1766 ),
1767 ),
1767 (b'c', b'continue', False, _(b'continue an edit already in progress')),
1768 (b'c', b'continue', False, _(b'continue an edit already in progress')),
1768 (b'', b'edit-plan', False, _(b'edit remaining actions list')),
1769 (b'', b'edit-plan', False, _(b'edit remaining actions list')),
1769 (
1770 (
1770 b'k',
1771 b'k',
1771 b'keep',
1772 b'keep',
1772 False,
1773 False,
1773 _(b"don't strip old nodes after edit is complete"),
1774 _(b"don't strip old nodes after edit is complete"),
1774 ),
1775 ),
1775 (b'', b'abort', False, _(b'abort an edit in progress')),
1776 (b'', b'abort', False, _(b'abort an edit in progress')),
1776 (b'o', b'outgoing', False, _(b'changesets not found in destination')),
1777 (b'o', b'outgoing', False, _(b'changesets not found in destination')),
1777 (
1778 (
1778 b'f',
1779 b'f',
1779 b'force',
1780 b'force',
1780 False,
1781 False,
1781 _(b'force outgoing even for unrelated repositories'),
1782 _(b'force outgoing even for unrelated repositories'),
1782 ),
1783 ),
1783 (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
1784 (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
1784 ]
1785 ]
1785 + cmdutil.formatteropts,
1786 + cmdutil.formatteropts,
1786 _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
1787 _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
1787 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
1788 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
1788 )
1789 )
1789 def histedit(ui, repo, *freeargs, **opts):
1790 def histedit(ui, repo, *freeargs, **opts):
1790 """interactively edit changeset history
1791 """interactively edit changeset history
1791
1792
1792 This command lets you edit a linear series of changesets (up to
1793 This command lets you edit a linear series of changesets (up to
1793 and including the working directory, which should be clean).
1794 and including the working directory, which should be clean).
1794 You can:
1795 You can:
1795
1796
1796 - `pick` to [re]order a changeset
1797 - `pick` to [re]order a changeset
1797
1798
1798 - `drop` to omit changeset
1799 - `drop` to omit changeset
1799
1800
1800 - `mess` to reword the changeset commit message
1801 - `mess` to reword the changeset commit message
1801
1802
1802 - `fold` to combine it with the preceding changeset (using the later date)
1803 - `fold` to combine it with the preceding changeset (using the later date)
1803
1804
1804 - `roll` like fold, but discarding this commit's description and date
1805 - `roll` like fold, but discarding this commit's description and date
1805
1806
1806 - `edit` to edit this changeset (preserving date)
1807 - `edit` to edit this changeset (preserving date)
1807
1808
1808 - `base` to checkout changeset and apply further changesets from there
1809 - `base` to checkout changeset and apply further changesets from there
1809
1810
1810 There are a number of ways to select the root changeset:
1811 There are a number of ways to select the root changeset:
1811
1812
1812 - Specify ANCESTOR directly
1813 - Specify ANCESTOR directly
1813
1814
1814 - Use --outgoing -- it will be the first linear changeset not
1815 - Use --outgoing -- it will be the first linear changeset not
1815 included in destination. (See :hg:`help config.paths.default-push`)
1816 included in destination. (See :hg:`help config.paths.default-push`)
1816
1817
1817 - Otherwise, the value from the "histedit.defaultrev" config option
1818 - Otherwise, the value from the "histedit.defaultrev" config option
1818 is used as a revset to select the base revision when ANCESTOR is not
1819 is used as a revset to select the base revision when ANCESTOR is not
1819 specified. The first revision returned by the revset is used. By
1820 specified. The first revision returned by the revset is used. By
1820 default, this selects the editable history that is unique to the
1821 default, this selects the editable history that is unique to the
1821 ancestry of the working directory.
1822 ancestry of the working directory.
1822
1823
1823 .. container:: verbose
1824 .. container:: verbose
1824
1825
1825 If you use --outgoing, this command will abort if there are ambiguous
1826 If you use --outgoing, this command will abort if there are ambiguous
1826 outgoing revisions. For example, if there are multiple branches
1827 outgoing revisions. For example, if there are multiple branches
1827 containing outgoing revisions.
1828 containing outgoing revisions.
1828
1829
1829 Use "min(outgoing() and ::.)" or similar revset specification
1830 Use "min(outgoing() and ::.)" or similar revset specification
1830 instead of --outgoing to specify edit target revision exactly in
1831 instead of --outgoing to specify edit target revision exactly in
1831 such ambiguous situation. See :hg:`help revsets` for detail about
1832 such ambiguous situation. See :hg:`help revsets` for detail about
1832 selecting revisions.
1833 selecting revisions.
1833
1834
1834 .. container:: verbose
1835 .. container:: verbose
1835
1836
1836 Examples:
1837 Examples:
1837
1838
1838 - A number of changes have been made.
1839 - A number of changes have been made.
1839 Revision 3 is no longer needed.
1840 Revision 3 is no longer needed.
1840
1841
1841 Start history editing from revision 3::
1842 Start history editing from revision 3::
1842
1843
1843 hg histedit -r 3
1844 hg histedit -r 3
1844
1845
1845 An editor opens, containing the list of revisions,
1846 An editor opens, containing the list of revisions,
1846 with specific actions specified::
1847 with specific actions specified::
1847
1848
1848 pick 5339bf82f0ca 3 Zworgle the foobar
1849 pick 5339bf82f0ca 3 Zworgle the foobar
1849 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1850 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1850 pick 0a9639fcda9d 5 Morgify the cromulancy
1851 pick 0a9639fcda9d 5 Morgify the cromulancy
1851
1852
1852 Additional information about the possible actions
1853 Additional information about the possible actions
1853 to take appears below the list of revisions.
1854 to take appears below the list of revisions.
1854
1855
1855 To remove revision 3 from the history,
1856 To remove revision 3 from the history,
1856 its action (at the beginning of the relevant line)
1857 its action (at the beginning of the relevant line)
1857 is changed to 'drop'::
1858 is changed to 'drop'::
1858
1859
1859 drop 5339bf82f0ca 3 Zworgle the foobar
1860 drop 5339bf82f0ca 3 Zworgle the foobar
1860 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1861 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1861 pick 0a9639fcda9d 5 Morgify the cromulancy
1862 pick 0a9639fcda9d 5 Morgify the cromulancy
1862
1863
1863 - A number of changes have been made.
1864 - A number of changes have been made.
1864 Revision 2 and 4 need to be swapped.
1865 Revision 2 and 4 need to be swapped.
1865
1866
1866 Start history editing from revision 2::
1867 Start history editing from revision 2::
1867
1868
1868 hg histedit -r 2
1869 hg histedit -r 2
1869
1870
1870 An editor opens, containing the list of revisions,
1871 An editor opens, containing the list of revisions,
1871 with specific actions specified::
1872 with specific actions specified::
1872
1873
1873 pick 252a1af424ad 2 Blorb a morgwazzle
1874 pick 252a1af424ad 2 Blorb a morgwazzle
1874 pick 5339bf82f0ca 3 Zworgle the foobar
1875 pick 5339bf82f0ca 3 Zworgle the foobar
1875 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1876 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1876
1877
1877 To swap revision 2 and 4, its lines are swapped
1878 To swap revision 2 and 4, its lines are swapped
1878 in the editor::
1879 in the editor::
1879
1880
1880 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1881 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1881 pick 5339bf82f0ca 3 Zworgle the foobar
1882 pick 5339bf82f0ca 3 Zworgle the foobar
1882 pick 252a1af424ad 2 Blorb a morgwazzle
1883 pick 252a1af424ad 2 Blorb a morgwazzle
1883
1884
1884 Returns 0 on success, 1 if user intervention is required (not only
1885 Returns 0 on success, 1 if user intervention is required (not only
1885 for intentional "edit" command, but also for resolving unexpected
1886 for intentional "edit" command, but also for resolving unexpected
1886 conflicts).
1887 conflicts).
1887 """
1888 """
1888 opts = pycompat.byteskwargs(opts)
1889 opts = pycompat.byteskwargs(opts)
1889
1890
1890 # kludge: _chistedit only works for starting an edit, not aborting
1891 # kludge: _chistedit only works for starting an edit, not aborting
1891 # or continuing, so fall back to regular _texthistedit for those
1892 # or continuing, so fall back to regular _texthistedit for those
1892 # operations.
1893 # operations.
1893 if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
1894 if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
1894 return _chistedit(ui, repo, freeargs, opts)
1895 return _chistedit(ui, repo, freeargs, opts)
1895 return _texthistedit(ui, repo, freeargs, opts)
1896 return _texthistedit(ui, repo, freeargs, opts)
1896
1897
1897
1898
1898 def _texthistedit(ui, repo, freeargs, opts):
1899 def _texthistedit(ui, repo, freeargs, opts):
1899 state = histeditstate(repo)
1900 state = histeditstate(repo)
1900 with repo.wlock() as wlock, repo.lock() as lock:
1901 with repo.wlock() as wlock, repo.lock() as lock:
1901 state.wlock = wlock
1902 state.wlock = wlock
1902 state.lock = lock
1903 state.lock = lock
1903 _histedit(ui, repo, state, freeargs, opts)
1904 _histedit(ui, repo, state, freeargs, opts)
1904
1905
1905
1906
1906 goalcontinue = b'continue'
1907 goalcontinue = b'continue'
1907 goalabort = b'abort'
1908 goalabort = b'abort'
1908 goaleditplan = b'edit-plan'
1909 goaleditplan = b'edit-plan'
1909 goalnew = b'new'
1910 goalnew = b'new'
1910
1911
1911
1912
1912 def _getgoal(opts):
1913 def _getgoal(opts):
1913 if opts.get(b'continue'):
1914 if opts.get(b'continue'):
1914 return goalcontinue
1915 return goalcontinue
1915 if opts.get(b'abort'):
1916 if opts.get(b'abort'):
1916 return goalabort
1917 return goalabort
1917 if opts.get(b'edit_plan'):
1918 if opts.get(b'edit_plan'):
1918 return goaleditplan
1919 return goaleditplan
1919 return goalnew
1920 return goalnew
1920
1921
1921
1922
1922 def _readfile(ui, path):
1923 def _readfile(ui, path):
1923 if path == b'-':
1924 if path == b'-':
1924 with ui.timeblockedsection(b'histedit'):
1925 with ui.timeblockedsection(b'histedit'):
1925 return ui.fin.read()
1926 return ui.fin.read()
1926 else:
1927 else:
1927 with open(path, b'rb') as f:
1928 with open(path, b'rb') as f:
1928 return f.read()
1929 return f.read()
1929
1930
1930
1931
def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
    """Check option combinations for the requested ``goal``, aborting on misuse.

    For a new session this also resolves the revision set: free arguments
    are folded into ``revs`` (mutated in place), and when none were given
    the configured default histedit destination is used.  Exactly one
    ancestor revision must result.
    """
    # TODO only abort if we try to histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise error.Abort(_(b'source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get(b'outgoing')
    editplan = opts.get(b'edit_plan')
    abort = opts.get(b'abort')
    force = opts.get(b'force')
    if force and not outg:
        raise error.Abort(_(b'--force only allowed with --outgoing'))
    if goal == b'continue':
        if any((outg, abort, revs, freeargs, rules, editplan)):
            raise error.Abort(_(b'no arguments allowed with --continue'))
    elif goal == b'abort':
        if any((outg, revs, freeargs, rules, editplan)):
            raise error.Abort(_(b'no arguments allowed with --abort'))
    elif goal == b'edit-plan':
        if any((outg, revs, freeargs)):
            raise error.Abort(
                _(b'only --commands argument allowed with --edit-plan')
            )
    else:
        # goal == 'new': refuse to start over an unfinished session
        if state.inprogress():
            raise error.Abort(
                _(
                    b'history edit already in progress, try '
                    b'--continue or --abort'
                )
            )
        if outg:
            if revs:
                raise error.Abort(_(b'no revisions allowed with --outgoing'))
            if len(freeargs) > 1:
                raise error.Abort(
                    _(b'only one repo argument allowed with --outgoing')
                )
        else:
            revs.extend(freeargs)
            if len(revs) == 0:
                # no explicit revision: fall back to the configured default
                defaultrev = destutil.desthistedit(ui, repo)
                if defaultrev is not None:
                    revs.append(defaultrev)

            if len(revs) != 1:
                raise error.Abort(
                    _(b'histedit requires exactly one ancestor revision')
                )
1982
1983
1983
1984
def _histedit(ui, repo, state, freeargs, opts):
    """Main histedit dispatcher.

    Validates arguments, warns about tags that would be lost, then runs
    the action matching the requested goal (continue/edit-plan/abort/new).
    For edit-plan and abort the function returns early; otherwise it
    executes the plan and finishes the session through the formatter.
    """
    fm = ui.formatter(b'histedit', opts)
    fm.startitem()
    goal = _getgoal(opts)
    revs = opts.get(b'rev', [])
    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
    rules = opts.get(b'commands', b'')
    state.keep = opts.get(b'keep', False)

    _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)

    hastags = False
    if revs:
        revs = scmutil.revrange(repo, revs)
        ctxs = [repo[rev] for rev in revs]
        for ctx in ctxs:
            tags = [tag for tag in ctx.tags() if tag != b'tip']
            if not hastags:
                hastags = len(tags)
    if hastags:
        # rewriting a tagged changeset silently drops its tags; make the
        # user opt in explicitly
        if ui.promptchoice(
            _(
                b'warning: tags associated with the given'
                b' changeset will be lost after histedit.\n'
                b'do you want to continue (yN)? $$ &Yes $$ &No'
            ),
            default=1,
        ):
            raise error.Abort(_(b'histedit cancelled\n'))
    # rebuild state
    if goal == goalcontinue:
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == goaleditplan:
        _edithisteditplan(ui, repo, state, rules)
        return
    elif goal == goalabort:
        _aborthistedit(ui, repo, state, nobackup=nobackup)
        return
    else:
        # goal == goalnew
        _newhistedit(ui, repo, state, revs, freeargs, opts)

    _continuehistedit(ui, repo, state)
    _finishhistedit(ui, repo, state, fm)
    fm.end()
2030
2031
2031
2032
def _continuehistedit(ui, repo, state):
    """This function runs after either:
    - bootstrapcontinue (if the goal is 'continue')
    - _newhistedit (if the goal is 'new')

    It executes the remaining actions one at a time, persisting the state
    file after every step so the session can be aborted or continued from
    any point.
    """
    # preprocess rules so that we can hide inner folds from the user
    # and only show one editor
    actions = state.actions[:]
    for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
        if action.verb == b'fold' and nextact and nextact.verb == b'fold':
            state.actions[idx].__class__ = _multifold

    # Force an initial state file write, so the user can run --abort/continue
    # even if there's an exception before the first transaction serialize.
    state.write()

    tr = None
    # Don't use singletransaction by default since it rolls the entire
    # transaction back if an unexpected exception happens (like a
    # pretxncommit hook throws, or the user aborts the commit msg editor).
    if ui.configbool(b"histedit", b"singletransaction"):
        # Don't use a 'with' for the transaction, since actions may close
        # and reopen a transaction. For example, if the action executes an
        # external process it may choose to commit the transaction first.
        tr = repo.transaction(b'histedit')
    progress = ui.makeprogress(
        _(b"editing"), unit=_(b'changes'), total=len(state.actions)
    )
    with progress, util.acceptintervention(tr):
        while state.actions:
            # persist before running so an interrupted action is re-runnable
            state.write(tr=tr)
            actobj = state.actions[0]
            progress.increment(item=actobj.torule())
            ui.debug(
                b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
            )
            parentctx, replacement_ = actobj.run()
            state.parentctxnode = parentctx.node()
            state.replacements.extend(replacement_)
            state.actions.pop(0)

    state.write()
2074
2075
2075
2076
def _finishhistedit(ui, repo, state, fm):
    """This action runs when histedit is finishing its session.

    Updates the working copy to the new parent, hands the old->new node
    mapping to scmutil.cleanupnodes (obsmarkers or strip), reports the
    mapping through the formatter, and removes the on-disk session state.
    """
    mergemod.update(repo[state.parentctxnode])

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in pycompat.iteritems(mapping):
            if not succs:
                ui.debug(b'histedit: %s is dropped\n' % short(prec))
            else:
                ui.debug(
                    b'histedit: %s is replaced by %s\n'
                    % (short(prec), short(succs[0]))
                )
                if len(succs) > 1:
                    m = b'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % short(n))

    if not state.keep:
        if mapping:
            movetopmostbookmarks(repo, state.topmost, ntm)
            # TODO update mq state
    else:
        # --keep: the original commits stay; only clean temporary nodes
        mapping = {}

    for n in tmpnodes:
        if n in repo:
            mapping[n] = ()

    # remove entries about unknown nodes
    has_node = repo.unfiltered().changelog.index.has_node
    mapping = {
        k: v
        for k, v in mapping.items()
        if has_node(k) and all(has_node(n) for n in v)
    }
    scmutil.cleanupnodes(repo, mapping, b'histedit')
    hf = fm.hexfunc
    fl = fm.formatlist
    fd = fm.formatdict
    nodechanges = fd(
        {
            hf(oldn): fl([hf(n) for n in newn], name=b'node')
            for oldn, newn in pycompat.iteritems(mapping)
        },
        key=b"oldnode",
        value=b"newnodes",
    )
    fm.data(nodechanges=nodechanges)

    state.clear()
    if os.path.exists(repo.sjoin(b'undo')):
        os.unlink(repo.sjoin(b'undo'))
    if repo.vfs.exists(b'histedit-last-edit.txt'):
        repo.vfs.unlink(b'histedit-last-edit.txt')
2132
2133
2133
2134
def _aborthistedit(ui, repo, state, nobackup=False):
    """Restore the repository to its pre-histedit condition.

    Re-applies the backup bundle if the original commits were stripped,
    moves the working copy back to the original parent when it sits on a
    node about to be removed, then strips temporary and leaf nodes.  The
    session state file is cleared even if cleanup itself fails.
    """
    try:
        state.read()
        __, leafs, tmpnodes, __ = processreplacement(state)
        ui.debug(b'restore wc to old parent %s\n' % short(state.topmost))

        # Recover our old commits if necessary
        if not state.topmost in repo and state.backupfile:
            backupfile = repo.vfs.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            with repo.transaction(b'histedit.abort') as tr:
                bundle2.applybundle(
                    repo,
                    gen,
                    tr,
                    source=b'histedit',
                    url=b'bundle:' + backupfile,
                )

            os.remove(backupfile)

        # check whether we should update away
        if repo.unfiltered().revs(
            b'parents() and (%n or %ln::)',
            state.parentctxnode,
            leafs | tmpnodes,
        ):
            hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
        cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
        cleanupnode(ui, repo, leafs, nobackup=nobackup)
    except Exception:
        if state.inprogress():
            ui.warn(
                _(
                    b'warning: encountered an exception during histedit '
                    b'--abort; the repository may not have been completely '
                    b'cleaned up\n'
                )
            )
        raise
    finally:
        state.clear()
2177
2178
2178
2179
def hgaborthistedit(ui, repo):
    """Entry point used by the generic abort machinery to abort histedit."""
    backup = ui.configbool(b'rewrite', b'backup-bundle')
    state = histeditstate(repo)
    with repo.wlock() as wl, repo.lock() as sl:
        state.wlock = wl
        state.lock = sl
        _aborthistedit(ui, repo, state, nobackup=not backup)
2186
2187
2187
2188
def _edithisteditplan(ui, repo, state, rules):
    """Replace the remaining plan of an in-progress histedit.

    ``rules`` is a file name (b'-' for stdin); when empty an editor is
    spawned pre-filled with the current remaining actions.  The new plan
    is verified against the changesets still to be edited before being
    written back to the state file.
    """
    state.read()
    if not rules:
        comment = geteditcomment(
            ui, short(state.parentctxnode), short(state.topmost)
        )
        rules = ruleeditor(repo, ui, state.actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    ctxs = [repo[act.node] for act in state.actions if act.node]
    warnverifyactions(ui, repo, actions, state, ctxs)
    state.actions = actions
    state.write()
2202
2203
2203
2204
def _newhistedit(ui, repo, state, revs, freeargs, opts):
    """Set up state for a brand-new histedit session.

    Determines the root of the edited range (from --outgoing or the given
    revisions), obtains the action plan (from --commands or an interactive
    editor), sniff-tests for untracked-file collisions, and writes a
    backup bundle (unless obsmarkers are enabled) so --abort can always
    restore the original commits.
    """
    outg = opts.get(b'outgoing')
    rules = opts.get(b'commands', b'')
    force = opts.get(b'force')

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    topmost = repo.dirstate.p1()
    if outg:
        if freeargs:
            remote = freeargs[0]
        else:
            remote = None
        root = findoutgoing(ui, repo, remote, force, opts)
    else:
        rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.Abort(
                _(
                    b'The specified revisions must have '
                    b'exactly one common root'
                )
            )
        root = rr[0].node()

    revs = between(repo, root, topmost, state.keep)
    if not revs:
        raise error.Abort(
            _(b'%s is not an ancestor of working directory') % short(root)
        )

    ctxs = [repo[r] for r in revs]

    wctx = repo[None]
    # Please don't ask me why `ancestors` is this value. I figured it
    # out with print-debugging, not by actually understanding what the
    # merge code is doing. :(
    ancs = [repo[b'.']]
    # Sniff-test to make sure we won't collide with untracked files in
    # the working directory. If we don't do this, we can get a
    # collision after we've started histedit and backing out gets ugly
    # for everyone, especially the user.
    for c in [ctxs[0].p1()] + ctxs:
        try:
            mergemod.calculateupdates(
                repo,
                wctx,
                c,
                ancs,
                # These parameters were determined by print-debugging
                # what happens later on inside histedit.
                branchmerge=False,
                force=False,
                acceptremote=False,
                followcopies=False,
            )
        except error.Abort:
            raise error.Abort(
                _(
                    b"untracked files in working directory conflict with files in %s"
                )
                % c
            )

    if not rules:
        comment = geteditcomment(ui, short(root), short(topmost))
        actions = [pick(state, r) for r in revs]
        rules = ruleeditor(repo, ui, actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    warnverifyactions(ui, repo, actions, state, ctxs)

    parentctxnode = repo[root].p1().node()

    state.parentctxnode = parentctxnode
    state.actions = actions
    state.topmost = topmost
    state.replacements = []

    ui.log(
        b"histedit",
        b"%d actions to histedit\n",
        len(actions),
        histedit_num_actions=len(actions),
    )

    # Create a backup so we can always abort completely.
    backupfile = None
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        backupfile = repair.backupbundle(
            repo, [parentctxnode], [topmost], root, b'histedit'
        )
    state.backupfile = backupfile
2299
2300
2300
2301
2301 def _getsummary(ctx):
2302 def _getsummary(ctx):
2302 # a common pattern is to extract the summary but default to the empty
2303 # a common pattern is to extract the summary but default to the empty
2303 # string
2304 # string
2304 summary = ctx.description() or b''
2305 summary = ctx.description() or b''
2305 if summary:
2306 if summary:
2306 summary = summary.splitlines()[0]
2307 summary = summary.splitlines()[0]
2307 return summary
2308 return summary
2308
2309
2309
2310
def bootstrapcontinue(ui, state, opts):
    """Resume an interrupted histedit session.

    Verifies there are no unresolved merge conflicts, then completes the
    interrupted action: a dirty working copy means the action left work
    for the user (continuedirty), after which the clean-continuation path
    records the resulting parent and replacements in the state.
    """
    repo = state.repo

    ms = mergestatemod.mergestate.read(repo)
    mergeutil.checkunresolved(ms)

    if state.actions:
        actobj = state.actions.pop(0)

        if _isdirtywc(repo):
            actobj.continuedirty()
            # still dirty after the action handled it: bail out
            if _isdirtywc(repo):
                abortdirty()

        parentctx, replacements = actobj.continueclean()

        state.parentctxnode = parentctx.node()
        state.replacements.extend(replacements)

    return state
2330
2331
2331
2332
def between(repo, old, new, keep):
    """select and validate the set of revision to edit

    When keep is false, the specified set can't have children.

    Returns the nodes of ``old::new``; aborts when the range contains a
    merge, since histedit cannot linearize merges.
    """
    revs = repo.revs(b'%n::%n', old, new)
    if revs and not keep:
        rewriteutil.precheck(repo, revs, b'edit')
        if repo.revs(b'(%ld) and merge()', revs):
            raise error.Abort(_(b'cannot edit history that contains merges'))
    return pycompat.maplist(repo.changelog.node, revs)
2342
2343
2343
2344
def ruleeditor(repo, ui, actions, editcomment=b""):
    """open an editor to edit rules

    rules are in the format [ [act, ctx], ...] like in state.rules

    Returns the edited rules text verbatim; a copy is also saved to
    .hg/histedit-last-edit.txt so the plan can be recovered later.
    """
    if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
        # autoverb: a commit summary starting with "<verb>!" pre-selects
        # that verb and moves the commit next to the commit whose summary
        # matches the remainder of the line.
        newact = util.sortdict()
        for act in actions:
            ctx = repo[act.node]
            summary = _getsummary(ctx)
            fword = summary.split(b' ', 1)[0].lower()
            added = False

            # if it doesn't end with the special character '!' just skip this
            if fword.endswith(b'!'):
                fword = fword[:-1]
                if fword in primaryactions | secondaryactions | tertiaryactions:
                    act.verb = fword
                    # get the target summary
                    tsum = summary[len(fword) + 1 :].lstrip()
                    # safe but slow: reverse iterate over the actions so we
                    # don't clash on two commits having the same summary
                    for na, l in reversed(list(pycompat.iteritems(newact))):
                        actx = repo[na.node]
                        asum = _getsummary(actx)
                        if asum == tsum:
                            added = True
                            l.append(act)
                            break

            if not added:
                newact[act] = []

        # copy over and flatten the new list
        actions = []
        for na, l in pycompat.iteritems(newact):
            actions.append(na)
            actions += l

    rules = b'\n'.join([act.torule() for act in actions])
    rules += b'\n\n'
    rules += editcomment
    rules = ui.edit(
        rules,
        ui.username(),
        {b'prefix': b'histedit'},
        repopath=repo.path,
        action=b'histedit',
    )

    # Save edit rules in .hg/histedit-last-edit.txt in case
    # the user needs to ask for help after something
    # surprising happens.
    with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
        f.write(rules)

    return rules
2401
2402
2402
2403
def parserules(rules, state):
    """Read the histedit rules string and return list of action objects """
    actions = []
    for raw in rules.splitlines():
        line = raw.strip()
        # skip blanks and comment lines
        if not line or line.startswith(b'#'):
            continue
        if b' ' not in line:
            raise error.ParseError(_(b'malformed line "%s"') % line)
        verb, rest = line.split(b' ', 1)

        if verb not in actiontable:
            raise error.ParseError(_(b'unknown action "%s"') % verb)

        actions.append(actiontable[verb].fromrule(state, rest))
    return actions
2422
2423
2423
2424
def warnverifyactions(ui, repo, actions, state, ctxs):
    """Run verifyactions, pointing the user at the saved plan on error.

    On ParseError, remind the user that the edited rules were saved to
    .hg/histedit-last-edit.txt (if present) before re-raising.
    """
    try:
        verifyactions(actions, state, ctxs)
    except error.ParseError:
        if repo.vfs.exists(b'histedit-last-edit.txt'):
            ui.warn(
                _(
                    b'warning: histedit rules saved '
                    b'to: .hg/histedit-last-edit.txt\n'
                )
            )
        raise
2436
2437
2437
2438
def verifyactions(actions, state, ctxs):
    """Verify that there exists exactly one action per given changeset and
    other constraints.

    Will abort if there are to many or too few rules, a malformed rule,
    or a rule on a changeset outside of the user-given range.

    With histedit.dropmissing set, changesets missing from the plan are
    silently turned into leading drop actions instead of aborting.
    """
    expected = {c.node() for c in ctxs}
    seen = set()
    prev = None

    # folding/rolling needs a preceding changeset to fold into
    if actions and actions[0].verb in [b'roll', b'fold']:
        raise error.ParseError(
            _(b'first changeset cannot use verb "%s"') % actions[0].verb
        )

    for action in actions:
        action.verify(prev, expected, seen)
        prev = action
        if action.node is not None:
            seen.add(action.node)
    missing = sorted(expected - seen)  # sort to stabilize output

    if state.repo.ui.configbool(b'histedit', b'dropmissing'):
        if len(actions) == 0:
            raise error.ParseError(
                _(b'no rules provided'),
                hint=_(b'use strip extension to remove commits'),
            )

        drops = [drop(state, n) for n in missing]
        # put the in the beginning so they execute immediately and
        # don't show in the edit-plan in the future
        actions[:0] = drops
    elif missing:
        raise error.ParseError(
            _(b'missing rules for changeset %s') % short(missing[0]),
            hint=_(
                b'use "drop %s" to discard, see also: '
                b"'hg help -e histedit.config'"
            )
            % short(missing[0]),
        )
2481
2482
2482
2483
2483 def adjustreplacementsfrommarkers(repo, oldreplacements):
2484 def adjustreplacementsfrommarkers(repo, oldreplacements):
2484 """Adjust replacements from obsolescence markers
2485 """Adjust replacements from obsolescence markers
2485
2486
2486 Replacements structure is originally generated based on
2487 Replacements structure is originally generated based on
2487 histedit's state and does not account for changes that are
2488 histedit's state and does not account for changes that are
2488 not recorded there. This function fixes that by adding
2489 not recorded there. This function fixes that by adding
2489 data read from obsolescence markers"""
2490 data read from obsolescence markers"""
2490 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2491 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2491 return oldreplacements
2492 return oldreplacements
2492
2493
2493 unfi = repo.unfiltered()
2494 unfi = repo.unfiltered()
2494 get_rev = unfi.changelog.index.get_rev
2495 get_rev = unfi.changelog.index.get_rev
2495 obsstore = repo.obsstore
2496 obsstore = repo.obsstore
2496 newreplacements = list(oldreplacements)
2497 newreplacements = list(oldreplacements)
2497 oldsuccs = [r[1] for r in oldreplacements]
2498 oldsuccs = [r[1] for r in oldreplacements]
2498 # successors that have already been added to succstocheck once
2499 # successors that have already been added to succstocheck once
2499 seensuccs = set().union(
2500 seensuccs = set().union(
2500 *oldsuccs
2501 *oldsuccs
2501 ) # create a set from an iterable of tuples
2502 ) # create a set from an iterable of tuples
2502 succstocheck = list(seensuccs)
2503 succstocheck = list(seensuccs)
2503 while succstocheck:
2504 while succstocheck:
2504 n = succstocheck.pop()
2505 n = succstocheck.pop()
2505 missing = get_rev(n) is None
2506 missing = get_rev(n) is None
2506 markers = obsstore.successors.get(n, ())
2507 markers = obsstore.successors.get(n, ())
2507 if missing and not markers:
2508 if missing and not markers:
2508 # dead end, mark it as such
2509 # dead end, mark it as such
2509 newreplacements.append((n, ()))
2510 newreplacements.append((n, ()))
2510 for marker in markers:
2511 for marker in markers:
2511 nsuccs = marker[1]
2512 nsuccs = marker[1]
2512 newreplacements.append((n, nsuccs))
2513 newreplacements.append((n, nsuccs))
2513 for nsucc in nsuccs:
2514 for nsucc in nsuccs:
2514 if nsucc not in seensuccs:
2515 if nsucc not in seensuccs:
2515 seensuccs.add(nsucc)
2516 seensuccs.add(nsucc)
2516 succstocheck.append(nsucc)
2517 succstocheck.append(nsucc)
2517
2518
2518 return newreplacements
2519 return newreplacements
2519
2520
2520
2521
2521 def processreplacement(state):
2522 def processreplacement(state):
2522 """process the list of replacements to return
2523 """process the list of replacements to return
2523
2524
2524 1) the final mapping between original and created nodes
2525 1) the final mapping between original and created nodes
2525 2) the list of temporary node created by histedit
2526 2) the list of temporary node created by histedit
2526 3) the list of new commit created by histedit"""
2527 3) the list of new commit created by histedit"""
2527 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
2528 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
2528 allsuccs = set()
2529 allsuccs = set()
2529 replaced = set()
2530 replaced = set()
2530 fullmapping = {}
2531 fullmapping = {}
2531 # initialize basic set
2532 # initialize basic set
2532 # fullmapping records all operations recorded in replacement
2533 # fullmapping records all operations recorded in replacement
2533 for rep in replacements:
2534 for rep in replacements:
2534 allsuccs.update(rep[1])
2535 allsuccs.update(rep[1])
2535 replaced.add(rep[0])
2536 replaced.add(rep[0])
2536 fullmapping.setdefault(rep[0], set()).update(rep[1])
2537 fullmapping.setdefault(rep[0], set()).update(rep[1])
2537 new = allsuccs - replaced
2538 new = allsuccs - replaced
2538 tmpnodes = allsuccs & replaced
2539 tmpnodes = allsuccs & replaced
2539 # Reduce content fullmapping into direct relation between original nodes
2540 # Reduce content fullmapping into direct relation between original nodes
2540 # and final node created during history edition
2541 # and final node created during history edition
2541 # Dropped changeset are replaced by an empty list
2542 # Dropped changeset are replaced by an empty list
2542 toproceed = set(fullmapping)
2543 toproceed = set(fullmapping)
2543 final = {}
2544 final = {}
2544 while toproceed:
2545 while toproceed:
2545 for x in list(toproceed):
2546 for x in list(toproceed):
2546 succs = fullmapping[x]
2547 succs = fullmapping[x]
2547 for s in list(succs):
2548 for s in list(succs):
2548 if s in toproceed:
2549 if s in toproceed:
2549 # non final node with unknown closure
2550 # non final node with unknown closure
2550 # We can't process this now
2551 # We can't process this now
2551 break
2552 break
2552 elif s in final:
2553 elif s in final:
2553 # non final node, replace with closure
2554 # non final node, replace with closure
2554 succs.remove(s)
2555 succs.remove(s)
2555 succs.update(final[s])
2556 succs.update(final[s])
2556 else:
2557 else:
2557 final[x] = succs
2558 final[x] = succs
2558 toproceed.remove(x)
2559 toproceed.remove(x)
2559 # remove tmpnodes from final mapping
2560 # remove tmpnodes from final mapping
2560 for n in tmpnodes:
2561 for n in tmpnodes:
2561 del final[n]
2562 del final[n]
2562 # we expect all changes involved in final to exist in the repo
2563 # we expect all changes involved in final to exist in the repo
2563 # turn `final` into list (topologically sorted)
2564 # turn `final` into list (topologically sorted)
2564 get_rev = state.repo.changelog.index.get_rev
2565 get_rev = state.repo.changelog.index.get_rev
2565 for prec, succs in final.items():
2566 for prec, succs in final.items():
2566 final[prec] = sorted(succs, key=get_rev)
2567 final[prec] = sorted(succs, key=get_rev)
2567
2568
2568 # computed topmost element (necessary for bookmark)
2569 # computed topmost element (necessary for bookmark)
2569 if new:
2570 if new:
2570 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
2571 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
2571 elif not final:
2572 elif not final:
2572 # Nothing rewritten at all. we won't need `newtopmost`
2573 # Nothing rewritten at all. we won't need `newtopmost`
2573 # It is the same as `oldtopmost` and `processreplacement` know it
2574 # It is the same as `oldtopmost` and `processreplacement` know it
2574 newtopmost = None
2575 newtopmost = None
2575 else:
2576 else:
2576 # every body died. The newtopmost is the parent of the root.
2577 # every body died. The newtopmost is the parent of the root.
2577 r = state.repo.changelog.rev
2578 r = state.repo.changelog.rev
2578 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
2579 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
2579
2580
2580 return final, tmpnodes, new, newtopmost
2581 return final, tmpnodes, new, newtopmost
2581
2582
2582
2583
2583 def movetopmostbookmarks(repo, oldtopmost, newtopmost):
2584 def movetopmostbookmarks(repo, oldtopmost, newtopmost):
2584 """Move bookmark from oldtopmost to newly created topmost
2585 """Move bookmark from oldtopmost to newly created topmost
2585
2586
2586 This is arguably a feature and we may only want that for the active
2587 This is arguably a feature and we may only want that for the active
2587 bookmark. But the behavior is kept compatible with the old version for now.
2588 bookmark. But the behavior is kept compatible with the old version for now.
2588 """
2589 """
2589 if not oldtopmost or not newtopmost:
2590 if not oldtopmost or not newtopmost:
2590 return
2591 return
2591 oldbmarks = repo.nodebookmarks(oldtopmost)
2592 oldbmarks = repo.nodebookmarks(oldtopmost)
2592 if oldbmarks:
2593 if oldbmarks:
2593 with repo.lock(), repo.transaction(b'histedit') as tr:
2594 with repo.lock(), repo.transaction(b'histedit') as tr:
2594 marks = repo._bookmarks
2595 marks = repo._bookmarks
2595 changes = []
2596 changes = []
2596 for name in oldbmarks:
2597 for name in oldbmarks:
2597 changes.append((name, newtopmost))
2598 changes.append((name, newtopmost))
2598 marks.applychanges(repo, tr, changes)
2599 marks.applychanges(repo, tr, changes)
2599
2600
2600
2601
2601 def cleanupnode(ui, repo, nodes, nobackup=False):
2602 def cleanupnode(ui, repo, nodes, nobackup=False):
2602 """strip a group of nodes from the repository
2603 """strip a group of nodes from the repository
2603
2604
2604 The set of node to strip may contains unknown nodes."""
2605 The set of node to strip may contains unknown nodes."""
2605 with repo.lock():
2606 with repo.lock():
2606 # do not let filtering get in the way of the cleanse
2607 # do not let filtering get in the way of the cleanse
2607 # we should probably get rid of obsolescence marker created during the
2608 # we should probably get rid of obsolescence marker created during the
2608 # histedit, but we currently do not have such information.
2609 # histedit, but we currently do not have such information.
2609 repo = repo.unfiltered()
2610 repo = repo.unfiltered()
2610 # Find all nodes that need to be stripped
2611 # Find all nodes that need to be stripped
2611 # (we use %lr instead of %ln to silently ignore unknown items)
2612 # (we use %lr instead of %ln to silently ignore unknown items)
2612 has_node = repo.changelog.index.has_node
2613 has_node = repo.changelog.index.has_node
2613 nodes = sorted(n for n in nodes if has_node(n))
2614 nodes = sorted(n for n in nodes if has_node(n))
2614 roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
2615 roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
2615 if roots:
2616 if roots:
2616 backup = not nobackup
2617 backup = not nobackup
2617 repair.strip(ui, repo, roots, backup=backup)
2618 repair.strip(ui, repo, roots, backup=backup)
2618
2619
2619
2620
2620 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
2621 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
2621 if isinstance(nodelist, bytes):
2622 if isinstance(nodelist, bytes):
2622 nodelist = [nodelist]
2623 nodelist = [nodelist]
2623 state = histeditstate(repo)
2624 state = histeditstate(repo)
2624 if state.inprogress():
2625 if state.inprogress():
2625 state.read()
2626 state.read()
2626 histedit_nodes = {
2627 histedit_nodes = {
2627 action.node for action in state.actions if action.node
2628 action.node for action in state.actions if action.node
2628 }
2629 }
2629 common_nodes = histedit_nodes & set(nodelist)
2630 common_nodes = histedit_nodes & set(nodelist)
2630 if common_nodes:
2631 if common_nodes:
2631 raise error.Abort(
2632 raise error.Abort(
2632 _(b"histedit in progress, can't strip %s")
2633 _(b"histedit in progress, can't strip %s")
2633 % b', '.join(short(x) for x in common_nodes)
2634 % b', '.join(short(x) for x in common_nodes)
2634 )
2635 )
2635 return orig(ui, repo, nodelist, *args, **kwargs)
2636 return orig(ui, repo, nodelist, *args, **kwargs)
2636
2637
2637
2638
2638 extensions.wrapfunction(repair, b'strip', stripwrapper)
2639 extensions.wrapfunction(repair, b'strip', stripwrapper)
2639
2640
2640
2641
2641 def summaryhook(ui, repo):
2642 def summaryhook(ui, repo):
2642 state = histeditstate(repo)
2643 state = histeditstate(repo)
2643 if not state.inprogress():
2644 if not state.inprogress():
2644 return
2645 return
2645 state.read()
2646 state.read()
2646 if state.actions:
2647 if state.actions:
2647 # i18n: column positioning for "hg summary"
2648 # i18n: column positioning for "hg summary"
2648 ui.write(
2649 ui.write(
2649 _(b'hist: %s (histedit --continue)\n')
2650 _(b'hist: %s (histedit --continue)\n')
2650 % (
2651 % (
2651 ui.label(_(b'%d remaining'), b'histedit.remaining')
2652 ui.label(_(b'%d remaining'), b'histedit.remaining')
2652 % len(state.actions)
2653 % len(state.actions)
2653 )
2654 )
2654 )
2655 )
2655
2656
2656
2657
2657 def extsetup(ui):
2658 def extsetup(ui):
2658 cmdutil.summaryhooks.add(b'histedit', summaryhook)
2659 cmdutil.summaryhooks.add(b'histedit', summaryhook)
2659 statemod.addunfinished(
2660 statemod.addunfinished(
2660 b'histedit',
2661 b'histedit',
2661 fname=b'histedit-state',
2662 fname=b'histedit-state',
2662 allowcommit=True,
2663 allowcommit=True,
2663 continueflag=True,
2664 continueflag=True,
2664 abortfunc=hgaborthistedit,
2665 abortfunc=hgaborthistedit,
2665 )
2666 )
@@ -1,179 +1,182 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''base class for store implementations and store-related utility code'''
9 '''base class for store implementations and store-related utility code'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 from mercurial import node, util
14 from mercurial import node, util
15 from mercurial.utils import (
16 urlutil,
17 )
15
18
16 from . import lfutil
19 from . import lfutil
17
20
18
21
19 class StoreError(Exception):
22 class StoreError(Exception):
20 """Raised when there is a problem getting files from or putting
23 """Raised when there is a problem getting files from or putting
21 files to a central store."""
24 files to a central store."""
22
25
23 def __init__(self, filename, hash, url, detail):
26 def __init__(self, filename, hash, url, detail):
24 self.filename = filename
27 self.filename = filename
25 self.hash = hash
28 self.hash = hash
26 self.url = url
29 self.url = url
27 self.detail = detail
30 self.detail = detail
28
31
29 def longmessage(self):
32 def longmessage(self):
30 return _(b"error getting id %s from url %s for file %s: %s\n") % (
33 return _(b"error getting id %s from url %s for file %s: %s\n") % (
31 self.hash,
34 self.hash,
32 util.hidepassword(self.url),
35 urlutil.hidepassword(self.url),
33 self.filename,
36 self.filename,
34 self.detail,
37 self.detail,
35 )
38 )
36
39
37 def __str__(self):
40 def __str__(self):
38 return b"%s: %s" % (util.hidepassword(self.url), self.detail)
41 return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail)
39
42
40
43
41 class basestore(object):
44 class basestore(object):
42 def __init__(self, ui, repo, url):
45 def __init__(self, ui, repo, url):
43 self.ui = ui
46 self.ui = ui
44 self.repo = repo
47 self.repo = repo
45 self.url = url
48 self.url = url
46
49
47 def put(self, source, hash):
50 def put(self, source, hash):
48 '''Put source file into the store so it can be retrieved by hash.'''
51 '''Put source file into the store so it can be retrieved by hash.'''
49 raise NotImplementedError(b'abstract method')
52 raise NotImplementedError(b'abstract method')
50
53
51 def exists(self, hashes):
54 def exists(self, hashes):
52 """Check to see if the store contains the given hashes. Given an
55 """Check to see if the store contains the given hashes. Given an
53 iterable of hashes it returns a mapping from hash to bool."""
56 iterable of hashes it returns a mapping from hash to bool."""
54 raise NotImplementedError(b'abstract method')
57 raise NotImplementedError(b'abstract method')
55
58
56 def get(self, files):
59 def get(self, files):
57 """Get the specified largefiles from the store and write to local
60 """Get the specified largefiles from the store and write to local
58 files under repo.root. files is a list of (filename, hash)
61 files under repo.root. files is a list of (filename, hash)
59 tuples. Return (success, missing), lists of files successfully
62 tuples. Return (success, missing), lists of files successfully
60 downloaded and those not found in the store. success is a list
63 downloaded and those not found in the store. success is a list
61 of (filename, hash) tuples; missing is a list of filenames that
64 of (filename, hash) tuples; missing is a list of filenames that
62 we could not get. (The detailed error message will already have
65 we could not get. (The detailed error message will already have
63 been presented to the user, so missing is just supplied as a
66 been presented to the user, so missing is just supplied as a
64 summary.)"""
67 summary.)"""
65 success = []
68 success = []
66 missing = []
69 missing = []
67 ui = self.ui
70 ui = self.ui
68
71
69 at = 0
72 at = 0
70 available = self.exists({hash for (_filename, hash) in files})
73 available = self.exists({hash for (_filename, hash) in files})
71 with ui.makeprogress(
74 with ui.makeprogress(
72 _(b'getting largefiles'), unit=_(b'files'), total=len(files)
75 _(b'getting largefiles'), unit=_(b'files'), total=len(files)
73 ) as progress:
76 ) as progress:
74 for filename, hash in files:
77 for filename, hash in files:
75 progress.update(at)
78 progress.update(at)
76 at += 1
79 at += 1
77 ui.note(_(b'getting %s:%s\n') % (filename, hash))
80 ui.note(_(b'getting %s:%s\n') % (filename, hash))
78
81
79 if not available.get(hash):
82 if not available.get(hash):
80 ui.warn(
83 ui.warn(
81 _(b'%s: largefile %s not available from %s\n')
84 _(b'%s: largefile %s not available from %s\n')
82 % (filename, hash, util.hidepassword(self.url))
85 % (filename, hash, urlutil.hidepassword(self.url))
83 )
86 )
84 missing.append(filename)
87 missing.append(filename)
85 continue
88 continue
86
89
87 if self._gethash(filename, hash):
90 if self._gethash(filename, hash):
88 success.append((filename, hash))
91 success.append((filename, hash))
89 else:
92 else:
90 missing.append(filename)
93 missing.append(filename)
91
94
92 return (success, missing)
95 return (success, missing)
93
96
94 def _gethash(self, filename, hash):
97 def _gethash(self, filename, hash):
95 """Get file with the provided hash and store it in the local repo's
98 """Get file with the provided hash and store it in the local repo's
96 store and in the usercache.
99 store and in the usercache.
97 filename is for informational messages only.
100 filename is for informational messages only.
98 """
101 """
99 util.makedirs(lfutil.storepath(self.repo, b''))
102 util.makedirs(lfutil.storepath(self.repo, b''))
100 storefilename = lfutil.storepath(self.repo, hash)
103 storefilename = lfutil.storepath(self.repo, hash)
101
104
102 tmpname = storefilename + b'.tmp'
105 tmpname = storefilename + b'.tmp'
103 with util.atomictempfile(
106 with util.atomictempfile(
104 tmpname, createmode=self.repo.store.createmode
107 tmpname, createmode=self.repo.store.createmode
105 ) as tmpfile:
108 ) as tmpfile:
106 try:
109 try:
107 gothash = self._getfile(tmpfile, filename, hash)
110 gothash = self._getfile(tmpfile, filename, hash)
108 except StoreError as err:
111 except StoreError as err:
109 self.ui.warn(err.longmessage())
112 self.ui.warn(err.longmessage())
110 gothash = b""
113 gothash = b""
111
114
112 if gothash != hash:
115 if gothash != hash:
113 if gothash != b"":
116 if gothash != b"":
114 self.ui.warn(
117 self.ui.warn(
115 _(b'%s: data corruption (expected %s, got %s)\n')
118 _(b'%s: data corruption (expected %s, got %s)\n')
116 % (filename, hash, gothash)
119 % (filename, hash, gothash)
117 )
120 )
118 util.unlink(tmpname)
121 util.unlink(tmpname)
119 return False
122 return False
120
123
121 util.rename(tmpname, storefilename)
124 util.rename(tmpname, storefilename)
122 lfutil.linktousercache(self.repo, hash)
125 lfutil.linktousercache(self.repo, hash)
123 return True
126 return True
124
127
125 def verify(self, revs, contents=False):
128 def verify(self, revs, contents=False):
126 """Verify the existence (and, optionally, contents) of every big
129 """Verify the existence (and, optionally, contents) of every big
127 file revision referenced by every changeset in revs.
130 file revision referenced by every changeset in revs.
128 Return 0 if all is well, non-zero on any errors."""
131 Return 0 if all is well, non-zero on any errors."""
129
132
130 self.ui.status(
133 self.ui.status(
131 _(b'searching %d changesets for largefiles\n') % len(revs)
134 _(b'searching %d changesets for largefiles\n') % len(revs)
132 )
135 )
133 verified = set() # set of (filename, filenode) tuples
136 verified = set() # set of (filename, filenode) tuples
134 filestocheck = [] # list of (cset, filename, expectedhash)
137 filestocheck = [] # list of (cset, filename, expectedhash)
135 for rev in revs:
138 for rev in revs:
136 cctx = self.repo[rev]
139 cctx = self.repo[rev]
137 cset = b"%d:%s" % (cctx.rev(), node.short(cctx.node()))
140 cset = b"%d:%s" % (cctx.rev(), node.short(cctx.node()))
138
141
139 for standin in cctx:
142 for standin in cctx:
140 filename = lfutil.splitstandin(standin)
143 filename = lfutil.splitstandin(standin)
141 if filename:
144 if filename:
142 fctx = cctx[standin]
145 fctx = cctx[standin]
143 key = (filename, fctx.filenode())
146 key = (filename, fctx.filenode())
144 if key not in verified:
147 if key not in verified:
145 verified.add(key)
148 verified.add(key)
146 expectedhash = lfutil.readasstandin(fctx)
149 expectedhash = lfutil.readasstandin(fctx)
147 filestocheck.append((cset, filename, expectedhash))
150 filestocheck.append((cset, filename, expectedhash))
148
151
149 failed = self._verifyfiles(contents, filestocheck)
152 failed = self._verifyfiles(contents, filestocheck)
150
153
151 numrevs = len(verified)
154 numrevs = len(verified)
152 numlfiles = len({fname for (fname, fnode) in verified})
155 numlfiles = len({fname for (fname, fnode) in verified})
153 if contents:
156 if contents:
154 self.ui.status(
157 self.ui.status(
155 _(b'verified contents of %d revisions of %d largefiles\n')
158 _(b'verified contents of %d revisions of %d largefiles\n')
156 % (numrevs, numlfiles)
159 % (numrevs, numlfiles)
157 )
160 )
158 else:
161 else:
159 self.ui.status(
162 self.ui.status(
160 _(b'verified existence of %d revisions of %d largefiles\n')
163 _(b'verified existence of %d revisions of %d largefiles\n')
161 % (numrevs, numlfiles)
164 % (numrevs, numlfiles)
162 )
165 )
163 return int(failed)
166 return int(failed)
164
167
165 def _getfile(self, tmpfile, filename, hash):
168 def _getfile(self, tmpfile, filename, hash):
166 """Fetch one revision of one file from the store and write it
169 """Fetch one revision of one file from the store and write it
167 to tmpfile. Compute the hash of the file on-the-fly as it
170 to tmpfile. Compute the hash of the file on-the-fly as it
168 downloads and return the hash. Close tmpfile. Raise
171 downloads and return the hash. Close tmpfile. Raise
169 StoreError if unable to download the file (e.g. it does not
172 StoreError if unable to download the file (e.g. it does not
170 exist in the store)."""
173 exist in the store)."""
171 raise NotImplementedError(b'abstract method')
174 raise NotImplementedError(b'abstract method')
172
175
173 def _verifyfiles(self, contents, filestocheck):
176 def _verifyfiles(self, contents, filestocheck):
174 """Perform the actual verification of files in the store.
177 """Perform the actual verification of files in the store.
175 'contents' controls verification of content hash.
178 'contents' controls verification of content hash.
176 'filestocheck' is list of files to check.
179 'filestocheck' is list of files to check.
177 Returns _true_ if any problems are found!
180 Returns _true_ if any problems are found!
178 """
181 """
179 raise NotImplementedError(b'abstract method')
182 raise NotImplementedError(b'abstract method')
@@ -1,153 +1,156 b''
1 # Copyright 2010-2011 Fog Creek Software
1 # Copyright 2010-2011 Fog Creek Software
2 # Copyright 2010-2011 Unity Technologies
2 # Copyright 2010-2011 Unity Technologies
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 '''remote largefile store; the base class for wirestore'''
7 '''remote largefile store; the base class for wirestore'''
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11
11
12 from mercurial import (
12 from mercurial import (
13 error,
13 error,
14 pycompat,
14 pycompat,
15 util,
15 util,
16 )
16 )
17
17
18 from mercurial.utils import stringutil
18 from mercurial.utils import (
19 stringutil,
20 urlutil,
21 )
19
22
20 from . import (
23 from . import (
21 basestore,
24 basestore,
22 lfutil,
25 lfutil,
23 localstore,
26 localstore,
24 )
27 )
25
28
26 urlerr = util.urlerr
29 urlerr = util.urlerr
27 urlreq = util.urlreq
30 urlreq = util.urlreq
28
31
29
32
30 class remotestore(basestore.basestore):
33 class remotestore(basestore.basestore):
31 '''a largefile store accessed over a network'''
34 '''a largefile store accessed over a network'''
32
35
33 def __init__(self, ui, repo, url):
36 def __init__(self, ui, repo, url):
34 super(remotestore, self).__init__(ui, repo, url)
37 super(remotestore, self).__init__(ui, repo, url)
35 self._lstore = None
38 self._lstore = None
36 if repo is not None:
39 if repo is not None:
37 self._lstore = localstore.localstore(self.ui, self.repo, self.repo)
40 self._lstore = localstore.localstore(self.ui, self.repo, self.repo)
38
41
39 def put(self, source, hash):
42 def put(self, source, hash):
40 if self.sendfile(source, hash):
43 if self.sendfile(source, hash):
41 raise error.Abort(
44 raise error.Abort(
42 _(b'remotestore: could not put %s to remote store %s')
45 _(b'remotestore: could not put %s to remote store %s')
43 % (source, util.hidepassword(self.url))
46 % (source, urlutil.hidepassword(self.url))
44 )
47 )
45 self.ui.debug(
48 self.ui.debug(
46 _(b'remotestore: put %s to remote store %s\n')
49 _(b'remotestore: put %s to remote store %s\n')
47 % (source, util.hidepassword(self.url))
50 % (source, urlutil.hidepassword(self.url))
48 )
51 )
49
52
50 def exists(self, hashes):
53 def exists(self, hashes):
51 return {
54 return {
52 h: s == 0
55 h: s == 0
53 for (h, s) in pycompat.iteritems(
56 for (h, s) in pycompat.iteritems(
54 self._stat(hashes)
57 self._stat(hashes)
55 ) # dict-from-generator
58 ) # dict-from-generator
56 }
59 }
57
60
58 def sendfile(self, filename, hash):
61 def sendfile(self, filename, hash):
59 self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
62 self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
60 try:
63 try:
61 with lfutil.httpsendfile(self.ui, filename) as fd:
64 with lfutil.httpsendfile(self.ui, filename) as fd:
62 return self._put(hash, fd)
65 return self._put(hash, fd)
63 except IOError as e:
66 except IOError as e:
64 raise error.Abort(
67 raise error.Abort(
65 _(b'remotestore: could not open file %s: %s')
68 _(b'remotestore: could not open file %s: %s')
66 % (filename, stringutil.forcebytestr(e))
69 % (filename, stringutil.forcebytestr(e))
67 )
70 )
68
71
69 def _getfile(self, tmpfile, filename, hash):
72 def _getfile(self, tmpfile, filename, hash):
70 try:
73 try:
71 chunks = self._get(hash)
74 chunks = self._get(hash)
72 except urlerr.httperror as e:
75 except urlerr.httperror as e:
73 # 401s get converted to error.Aborts; everything else is fine being
76 # 401s get converted to error.Aborts; everything else is fine being
74 # turned into a StoreError
77 # turned into a StoreError
75 raise basestore.StoreError(
78 raise basestore.StoreError(
76 filename, hash, self.url, stringutil.forcebytestr(e)
79 filename, hash, self.url, stringutil.forcebytestr(e)
77 )
80 )
78 except urlerr.urlerror as e:
81 except urlerr.urlerror as e:
79 # This usually indicates a connection problem, so don't
82 # This usually indicates a connection problem, so don't
80 # keep trying with the other files... they will probably
83 # keep trying with the other files... they will probably
81 # all fail too.
84 # all fail too.
82 raise error.Abort(
85 raise error.Abort(
83 b'%s: %s' % (util.hidepassword(self.url), e.reason)
86 b'%s: %s' % (urlutil.hidepassword(self.url), e.reason)
84 )
87 )
85 except IOError as e:
88 except IOError as e:
86 raise basestore.StoreError(
89 raise basestore.StoreError(
87 filename, hash, self.url, stringutil.forcebytestr(e)
90 filename, hash, self.url, stringutil.forcebytestr(e)
88 )
91 )
89
92
90 return lfutil.copyandhash(chunks, tmpfile)
93 return lfutil.copyandhash(chunks, tmpfile)
91
94
92 def _hashesavailablelocally(self, hashes):
95 def _hashesavailablelocally(self, hashes):
93 existslocallymap = self._lstore.exists(hashes)
96 existslocallymap = self._lstore.exists(hashes)
94 localhashes = [hash for hash in hashes if existslocallymap[hash]]
97 localhashes = [hash for hash in hashes if existslocallymap[hash]]
95 return localhashes
98 return localhashes
96
99
97 def _verifyfiles(self, contents, filestocheck):
100 def _verifyfiles(self, contents, filestocheck):
98 failed = False
101 failed = False
99 expectedhashes = [
102 expectedhashes = [
100 expectedhash for cset, filename, expectedhash in filestocheck
103 expectedhash for cset, filename, expectedhash in filestocheck
101 ]
104 ]
102 localhashes = self._hashesavailablelocally(expectedhashes)
105 localhashes = self._hashesavailablelocally(expectedhashes)
103 stats = self._stat(
106 stats = self._stat(
104 [
107 [
105 expectedhash
108 expectedhash
106 for expectedhash in expectedhashes
109 for expectedhash in expectedhashes
107 if expectedhash not in localhashes
110 if expectedhash not in localhashes
108 ]
111 ]
109 )
112 )
110
113
111 for cset, filename, expectedhash in filestocheck:
114 for cset, filename, expectedhash in filestocheck:
112 if expectedhash in localhashes:
115 if expectedhash in localhashes:
113 filetocheck = (cset, filename, expectedhash)
116 filetocheck = (cset, filename, expectedhash)
114 verifyresult = self._lstore._verifyfiles(
117 verifyresult = self._lstore._verifyfiles(
115 contents, [filetocheck]
118 contents, [filetocheck]
116 )
119 )
117 if verifyresult:
120 if verifyresult:
118 failed = True
121 failed = True
119 else:
122 else:
120 stat = stats[expectedhash]
123 stat = stats[expectedhash]
121 if stat:
124 if stat:
122 if stat == 1:
125 if stat == 1:
123 self.ui.warn(
126 self.ui.warn(
124 _(b'changeset %s: %s: contents differ\n')
127 _(b'changeset %s: %s: contents differ\n')
125 % (cset, filename)
128 % (cset, filename)
126 )
129 )
127 failed = True
130 failed = True
128 elif stat == 2:
131 elif stat == 2:
129 self.ui.warn(
132 self.ui.warn(
130 _(b'changeset %s: %s missing\n') % (cset, filename)
133 _(b'changeset %s: %s missing\n') % (cset, filename)
131 )
134 )
132 failed = True
135 failed = True
133 else:
136 else:
134 raise RuntimeError(
137 raise RuntimeError(
135 b'verify failed: unexpected response '
138 b'verify failed: unexpected response '
136 b'from statlfile (%r)' % stat
139 b'from statlfile (%r)' % stat
137 )
140 )
138 return failed
141 return failed
139
142
140 def _put(self, hash, fd):
143 def _put(self, hash, fd):
141 '''Put file with the given hash in the remote store.'''
144 '''Put file with the given hash in the remote store.'''
142 raise NotImplementedError(b'abstract method')
145 raise NotImplementedError(b'abstract method')
143
146
144 def _get(self, hash):
147 def _get(self, hash):
145 '''Get a iterator for content with the given hash.'''
148 '''Get a iterator for content with the given hash.'''
146 raise NotImplementedError(b'abstract method')
149 raise NotImplementedError(b'abstract method')
147
150
148 def _stat(self, hashes):
151 def _stat(self, hashes):
149 """Get information about availability of files specified by
152 """Get information about availability of files specified by
150 hashes in the remote store. Return dictionary mapping hashes
153 hashes in the remote store. Return dictionary mapping hashes
151 to return code where 0 means that file is available, other
154 to return code where 0 means that file is available, other
152 values if not."""
155 values if not."""
153 raise NotImplementedError(b'abstract method')
156 raise NotImplementedError(b'abstract method')
@@ -1,89 +1,92 b''
1 # This software may be used and distributed according to the terms of the
1 # This software may be used and distributed according to the terms of the
2 # GNU General Public License version 2 or any later version.
2 # GNU General Public License version 2 or any later version.
3
3
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5
5
6 import re
6 import re
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial.pycompat import getattr
9 from mercurial.pycompat import getattr
10 from mercurial import (
10 from mercurial import (
11 error,
11 error,
12 hg,
12 hg,
13 util,
13 util,
14 )
14 )
15 from mercurial.utils import (
16 urlutil,
17 )
15
18
16 from . import (
19 from . import (
17 lfutil,
20 lfutil,
18 localstore,
21 localstore,
19 wirestore,
22 wirestore,
20 )
23 )
21
24
22 # During clone this function is passed the src's ui object
25 # During clone this function is passed the src's ui object
23 # but it needs the dest's ui object so it can read out of
26 # but it needs the dest's ui object so it can read out of
24 # the config file. Use repo.ui instead.
27 # the config file. Use repo.ui instead.
25 def openstore(repo=None, remote=None, put=False, ui=None):
28 def openstore(repo=None, remote=None, put=False, ui=None):
26 if ui is None:
29 if ui is None:
27 ui = repo.ui
30 ui = repo.ui
28
31
29 if not remote:
32 if not remote:
30 lfpullsource = getattr(repo, 'lfpullsource', None)
33 lfpullsource = getattr(repo, 'lfpullsource', None)
31 if lfpullsource:
34 if lfpullsource:
32 path = ui.expandpath(lfpullsource)
35 path = ui.expandpath(lfpullsource)
33 elif put:
36 elif put:
34 path = ui.expandpath(b'default-push', b'default')
37 path = ui.expandpath(b'default-push', b'default')
35 else:
38 else:
36 path = ui.expandpath(b'default')
39 path = ui.expandpath(b'default')
37
40
38 # ui.expandpath() leaves 'default-push' and 'default' alone if
41 # ui.expandpath() leaves 'default-push' and 'default' alone if
39 # they cannot be expanded: fallback to the empty string,
42 # they cannot be expanded: fallback to the empty string,
40 # meaning the current directory.
43 # meaning the current directory.
41 if repo is None:
44 if repo is None:
42 path = ui.expandpath(b'default')
45 path = ui.expandpath(b'default')
43 path, _branches = hg.parseurl(path)
46 path, _branches = hg.parseurl(path)
44 remote = hg.peer(repo or ui, {}, path)
47 remote = hg.peer(repo or ui, {}, path)
45 elif path == b'default-push' or path == b'default':
48 elif path == b'default-push' or path == b'default':
46 remote = repo
49 remote = repo
47 else:
50 else:
48 path, _branches = hg.parseurl(path)
51 path, _branches = hg.parseurl(path)
49 remote = hg.peer(repo or ui, {}, path)
52 remote = hg.peer(repo or ui, {}, path)
50
53
51 # The path could be a scheme so use Mercurial's normal functionality
54 # The path could be a scheme so use Mercurial's normal functionality
52 # to resolve the scheme to a repository and use its path
55 # to resolve the scheme to a repository and use its path
53 path = util.safehasattr(remote, b'url') and remote.url() or remote.path
56 path = util.safehasattr(remote, b'url') and remote.url() or remote.path
54
57
55 match = _scheme_re.match(path)
58 match = _scheme_re.match(path)
56 if not match: # regular filesystem path
59 if not match: # regular filesystem path
57 scheme = b'file'
60 scheme = b'file'
58 else:
61 else:
59 scheme = match.group(1)
62 scheme = match.group(1)
60
63
61 try:
64 try:
62 storeproviders = _storeprovider[scheme]
65 storeproviders = _storeprovider[scheme]
63 except KeyError:
66 except KeyError:
64 raise error.Abort(_(b'unsupported URL scheme %r') % scheme)
67 raise error.Abort(_(b'unsupported URL scheme %r') % scheme)
65
68
66 for classobj in storeproviders:
69 for classobj in storeproviders:
67 try:
70 try:
68 return classobj(ui, repo, remote)
71 return classobj(ui, repo, remote)
69 except lfutil.storeprotonotcapable:
72 except lfutil.storeprotonotcapable:
70 pass
73 pass
71
74
72 raise error.Abort(
75 raise error.Abort(
73 _(b'%s does not appear to be a largefile store')
76 _(b'%s does not appear to be a largefile store')
74 % util.hidepassword(path)
77 % urlutil.hidepassword(path)
75 )
78 )
76
79
77
80
78 _storeprovider = {
81 _storeprovider = {
79 b'file': [localstore.localstore],
82 b'file': [localstore.localstore],
80 b'http': [wirestore.wirestore],
83 b'http': [wirestore.wirestore],
81 b'https': [wirestore.wirestore],
84 b'https': [wirestore.wirestore],
82 b'ssh': [wirestore.wirestore],
85 b'ssh': [wirestore.wirestore],
83 }
86 }
84
87
85 _scheme_re = re.compile(br'^([a-zA-Z0-9+-.]+)://')
88 _scheme_re = re.compile(br'^([a-zA-Z0-9+-.]+)://')
86
89
87
90
88 def getlfile(ui, hash):
91 def getlfile(ui, hash):
89 return util.chunkbuffer(openstore(ui=ui)._get(hash))
92 return util.chunkbuffer(openstore(ui=ui)._get(hash))
@@ -1,767 +1,770 b''
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import contextlib
10 import contextlib
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import json
13 import json
14 import os
14 import os
15 import re
15 import re
16 import socket
16 import socket
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.pycompat import getattr
19 from mercurial.pycompat import getattr
20 from mercurial.node import hex
20 from mercurial.node import hex
21
21
22 from mercurial import (
22 from mercurial import (
23 encoding,
23 encoding,
24 error,
24 error,
25 httpconnection as httpconnectionmod,
25 httpconnection as httpconnectionmod,
26 pathutil,
26 pathutil,
27 pycompat,
27 pycompat,
28 url as urlmod,
28 url as urlmod,
29 util,
29 util,
30 vfs as vfsmod,
30 vfs as vfsmod,
31 worker,
31 worker,
32 )
32 )
33
33
34 from mercurial.utils import stringutil
34 from mercurial.utils import (
35 stringutil,
36 urlutil,
37 )
35
38
36 from ..largefiles import lfutil
39 from ..largefiles import lfutil
37
40
38 # 64 bytes for SHA256
41 # 64 bytes for SHA256
39 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
42 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
40
43
41
44
42 class lfsvfs(vfsmod.vfs):
45 class lfsvfs(vfsmod.vfs):
43 def join(self, path):
46 def join(self, path):
44 """split the path at first two characters, like: XX/XXXXX..."""
47 """split the path at first two characters, like: XX/XXXXX..."""
45 if not _lfsre.match(path):
48 if not _lfsre.match(path):
46 raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
49 raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
47 return super(lfsvfs, self).join(path[0:2], path[2:])
50 return super(lfsvfs, self).join(path[0:2], path[2:])
48
51
49 def walk(self, path=None, onerror=None):
52 def walk(self, path=None, onerror=None):
50 """Yield (dirpath, [], oids) tuple for blobs under path
53 """Yield (dirpath, [], oids) tuple for blobs under path
51
54
52 Oids only exist in the root of this vfs, so dirpath is always ''.
55 Oids only exist in the root of this vfs, so dirpath is always ''.
53 """
56 """
54 root = os.path.normpath(self.base)
57 root = os.path.normpath(self.base)
55 # when dirpath == root, dirpath[prefixlen:] becomes empty
58 # when dirpath == root, dirpath[prefixlen:] becomes empty
56 # because len(dirpath) < prefixlen.
59 # because len(dirpath) < prefixlen.
57 prefixlen = len(pathutil.normasprefix(root))
60 prefixlen = len(pathutil.normasprefix(root))
58 oids = []
61 oids = []
59
62
60 for dirpath, dirs, files in os.walk(
63 for dirpath, dirs, files in os.walk(
61 self.reljoin(self.base, path or b''), onerror=onerror
64 self.reljoin(self.base, path or b''), onerror=onerror
62 ):
65 ):
63 dirpath = dirpath[prefixlen:]
66 dirpath = dirpath[prefixlen:]
64
67
65 # Silently skip unexpected files and directories
68 # Silently skip unexpected files and directories
66 if len(dirpath) == 2:
69 if len(dirpath) == 2:
67 oids.extend(
70 oids.extend(
68 [dirpath + f for f in files if _lfsre.match(dirpath + f)]
71 [dirpath + f for f in files if _lfsre.match(dirpath + f)]
69 )
72 )
70
73
71 yield (b'', [], oids)
74 yield (b'', [], oids)
72
75
73
76
74 class nullvfs(lfsvfs):
77 class nullvfs(lfsvfs):
75 def __init__(self):
78 def __init__(self):
76 pass
79 pass
77
80
78 def exists(self, oid):
81 def exists(self, oid):
79 return False
82 return False
80
83
81 def read(self, oid):
84 def read(self, oid):
82 # store.read() calls into here if the blob doesn't exist in its
85 # store.read() calls into here if the blob doesn't exist in its
83 # self.vfs. Raise the same error as a normal vfs when asked to read a
86 # self.vfs. Raise the same error as a normal vfs when asked to read a
84 # file that doesn't exist. The only difference is the full file path
87 # file that doesn't exist. The only difference is the full file path
85 # isn't available in the error.
88 # isn't available in the error.
86 raise IOError(
89 raise IOError(
87 errno.ENOENT,
90 errno.ENOENT,
88 pycompat.sysstr(b'%s: No such file or directory' % oid),
91 pycompat.sysstr(b'%s: No such file or directory' % oid),
89 )
92 )
90
93
91 def walk(self, path=None, onerror=None):
94 def walk(self, path=None, onerror=None):
92 return (b'', [], [])
95 return (b'', [], [])
93
96
94 def write(self, oid, data):
97 def write(self, oid, data):
95 pass
98 pass
96
99
97
100
98 class lfsuploadfile(httpconnectionmod.httpsendfile):
101 class lfsuploadfile(httpconnectionmod.httpsendfile):
99 """a file-like object that supports keepalive."""
102 """a file-like object that supports keepalive."""
100
103
101 def __init__(self, ui, filename):
104 def __init__(self, ui, filename):
102 super(lfsuploadfile, self).__init__(ui, filename, b'rb')
105 super(lfsuploadfile, self).__init__(ui, filename, b'rb')
103 self.read = self._data.read
106 self.read = self._data.read
104
107
105 def _makeprogress(self):
108 def _makeprogress(self):
106 return None # progress is handled by the worker client
109 return None # progress is handled by the worker client
107
110
108
111
109 class local(object):
112 class local(object):
110 """Local blobstore for large file contents.
113 """Local blobstore for large file contents.
111
114
112 This blobstore is used both as a cache and as a staging area for large blobs
115 This blobstore is used both as a cache and as a staging area for large blobs
113 to be uploaded to the remote blobstore.
116 to be uploaded to the remote blobstore.
114 """
117 """
115
118
116 def __init__(self, repo):
119 def __init__(self, repo):
117 fullpath = repo.svfs.join(b'lfs/objects')
120 fullpath = repo.svfs.join(b'lfs/objects')
118 self.vfs = lfsvfs(fullpath)
121 self.vfs = lfsvfs(fullpath)
119
122
120 if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
123 if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
121 self.cachevfs = nullvfs()
124 self.cachevfs = nullvfs()
122 else:
125 else:
123 usercache = lfutil._usercachedir(repo.ui, b'lfs')
126 usercache = lfutil._usercachedir(repo.ui, b'lfs')
124 self.cachevfs = lfsvfs(usercache)
127 self.cachevfs = lfsvfs(usercache)
125 self.ui = repo.ui
128 self.ui = repo.ui
126
129
127 def open(self, oid):
130 def open(self, oid):
128 """Open a read-only file descriptor to the named blob, in either the
131 """Open a read-only file descriptor to the named blob, in either the
129 usercache or the local store."""
132 usercache or the local store."""
130 return open(self.path(oid), 'rb')
133 return open(self.path(oid), 'rb')
131
134
132 def path(self, oid):
135 def path(self, oid):
133 """Build the path for the given blob ``oid``.
136 """Build the path for the given blob ``oid``.
134
137
135 If the blob exists locally, the path may point to either the usercache
138 If the blob exists locally, the path may point to either the usercache
136 or the local store. If it doesn't, it will point to the local store.
139 or the local store. If it doesn't, it will point to the local store.
137 This is meant for situations where existing code that isn't LFS aware
140 This is meant for situations where existing code that isn't LFS aware
138 needs to open a blob. Generally, prefer the ``open`` method on this
141 needs to open a blob. Generally, prefer the ``open`` method on this
139 class.
142 class.
140 """
143 """
141 # The usercache is the most likely place to hold the file. Commit will
144 # The usercache is the most likely place to hold the file. Commit will
142 # write to both it and the local store, as will anything that downloads
145 # write to both it and the local store, as will anything that downloads
143 # the blobs. However, things like clone without an update won't
146 # the blobs. However, things like clone without an update won't
144 # populate the local store. For an init + push of a local clone,
147 # populate the local store. For an init + push of a local clone,
145 # the usercache is the only place it _could_ be. If not present, the
148 # the usercache is the only place it _could_ be. If not present, the
146 # missing file msg here will indicate the local repo, not the usercache.
149 # missing file msg here will indicate the local repo, not the usercache.
147 if self.cachevfs.exists(oid):
150 if self.cachevfs.exists(oid):
148 return self.cachevfs.join(oid)
151 return self.cachevfs.join(oid)
149
152
150 return self.vfs.join(oid)
153 return self.vfs.join(oid)
151
154
152 def download(self, oid, src, content_length):
155 def download(self, oid, src, content_length):
153 """Read the blob from the remote source in chunks, verify the content,
156 """Read the blob from the remote source in chunks, verify the content,
154 and write to this local blobstore."""
157 and write to this local blobstore."""
155 sha256 = hashlib.sha256()
158 sha256 = hashlib.sha256()
156 size = 0
159 size = 0
157
160
158 with self.vfs(oid, b'wb', atomictemp=True) as fp:
161 with self.vfs(oid, b'wb', atomictemp=True) as fp:
159 for chunk in util.filechunkiter(src, size=1048576):
162 for chunk in util.filechunkiter(src, size=1048576):
160 fp.write(chunk)
163 fp.write(chunk)
161 sha256.update(chunk)
164 sha256.update(chunk)
162 size += len(chunk)
165 size += len(chunk)
163
166
164 # If the server advertised a length longer than what we actually
167 # If the server advertised a length longer than what we actually
165 # received, then we should expect that the server crashed while
168 # received, then we should expect that the server crashed while
166 # producing the response (but the server has no way of telling us
169 # producing the response (but the server has no way of telling us
167 # that), and we really don't need to try to write the response to
170 # that), and we really don't need to try to write the response to
168 # the localstore, because it's not going to match the expected.
171 # the localstore, because it's not going to match the expected.
169 if content_length is not None and int(content_length) != size:
172 if content_length is not None and int(content_length) != size:
170 msg = (
173 msg = (
171 b"Response length (%s) does not match Content-Length "
174 b"Response length (%s) does not match Content-Length "
172 b"header (%d): likely server-side crash"
175 b"header (%d): likely server-side crash"
173 )
176 )
174 raise LfsRemoteError(_(msg) % (size, int(content_length)))
177 raise LfsRemoteError(_(msg) % (size, int(content_length)))
175
178
176 realoid = hex(sha256.digest())
179 realoid = hex(sha256.digest())
177 if realoid != oid:
180 if realoid != oid:
178 raise LfsCorruptionError(
181 raise LfsCorruptionError(
179 _(b'corrupt remote lfs object: %s') % oid
182 _(b'corrupt remote lfs object: %s') % oid
180 )
183 )
181
184
182 self._linktousercache(oid)
185 self._linktousercache(oid)
183
186
184 def write(self, oid, data):
187 def write(self, oid, data):
185 """Write blob to local blobstore.
188 """Write blob to local blobstore.
186
189
187 This should only be called from the filelog during a commit or similar.
190 This should only be called from the filelog during a commit or similar.
188 As such, there is no need to verify the data. Imports from a remote
191 As such, there is no need to verify the data. Imports from a remote
189 store must use ``download()`` instead."""
192 store must use ``download()`` instead."""
190 with self.vfs(oid, b'wb', atomictemp=True) as fp:
193 with self.vfs(oid, b'wb', atomictemp=True) as fp:
191 fp.write(data)
194 fp.write(data)
192
195
193 self._linktousercache(oid)
196 self._linktousercache(oid)
194
197
195 def linkfromusercache(self, oid):
198 def linkfromusercache(self, oid):
196 """Link blobs found in the user cache into this store.
199 """Link blobs found in the user cache into this store.
197
200
198 The server module needs to do this when it lets the client know not to
201 The server module needs to do this when it lets the client know not to
199 upload the blob, to ensure it is always available in this store.
202 upload the blob, to ensure it is always available in this store.
200 Normally this is done implicitly when the client reads or writes the
203 Normally this is done implicitly when the client reads or writes the
201 blob, but that doesn't happen when the server tells the client that it
204 blob, but that doesn't happen when the server tells the client that it
202 already has the blob.
205 already has the blob.
203 """
206 """
204 if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
207 if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
205 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
208 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
206 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
209 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
207
210
208 def _linktousercache(self, oid):
211 def _linktousercache(self, oid):
209 # XXX: should we verify the content of the cache, and hardlink back to
212 # XXX: should we verify the content of the cache, and hardlink back to
210 # the local store on success, but truncate, write and link on failure?
213 # the local store on success, but truncate, write and link on failure?
211 if not self.cachevfs.exists(oid) and not isinstance(
214 if not self.cachevfs.exists(oid) and not isinstance(
212 self.cachevfs, nullvfs
215 self.cachevfs, nullvfs
213 ):
216 ):
214 self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
217 self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
215 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
218 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
216
219
217 def read(self, oid, verify=True):
220 def read(self, oid, verify=True):
218 """Read blob from local blobstore."""
221 """Read blob from local blobstore."""
219 if not self.vfs.exists(oid):
222 if not self.vfs.exists(oid):
220 blob = self._read(self.cachevfs, oid, verify)
223 blob = self._read(self.cachevfs, oid, verify)
221
224
222 # Even if revlog will verify the content, it needs to be verified
225 # Even if revlog will verify the content, it needs to be verified
223 # now before making the hardlink to avoid propagating corrupt blobs.
226 # now before making the hardlink to avoid propagating corrupt blobs.
224 # Don't abort if corruption is detected, because `hg verify` will
227 # Don't abort if corruption is detected, because `hg verify` will
225 # give more useful info about the corruption- simply don't add the
228 # give more useful info about the corruption- simply don't add the
226 # hardlink.
229 # hardlink.
227 if verify or hex(hashlib.sha256(blob).digest()) == oid:
230 if verify or hex(hashlib.sha256(blob).digest()) == oid:
228 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
231 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
229 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
232 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
230 else:
233 else:
231 self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
234 self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
232 blob = self._read(self.vfs, oid, verify)
235 blob = self._read(self.vfs, oid, verify)
233 return blob
236 return blob
234
237
235 def _read(self, vfs, oid, verify):
238 def _read(self, vfs, oid, verify):
236 """Read blob (after verifying) from the given store"""
239 """Read blob (after verifying) from the given store"""
237 blob = vfs.read(oid)
240 blob = vfs.read(oid)
238 if verify:
241 if verify:
239 _verify(oid, blob)
242 _verify(oid, blob)
240 return blob
243 return blob
241
244
242 def verify(self, oid):
245 def verify(self, oid):
243 """Indicate whether or not the hash of the underlying file matches its
246 """Indicate whether or not the hash of the underlying file matches its
244 name."""
247 name."""
245 sha256 = hashlib.sha256()
248 sha256 = hashlib.sha256()
246
249
247 with self.open(oid) as fp:
250 with self.open(oid) as fp:
248 for chunk in util.filechunkiter(fp, size=1048576):
251 for chunk in util.filechunkiter(fp, size=1048576):
249 sha256.update(chunk)
252 sha256.update(chunk)
250
253
251 return oid == hex(sha256.digest())
254 return oid == hex(sha256.digest())
252
255
253 def has(self, oid):
256 def has(self, oid):
254 """Returns True if the local blobstore contains the requested blob,
257 """Returns True if the local blobstore contains the requested blob,
255 False otherwise."""
258 False otherwise."""
256 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
259 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
257
260
258
261
259 def _urlerrorreason(urlerror):
262 def _urlerrorreason(urlerror):
260 """Create a friendly message for the given URLError to be used in an
263 """Create a friendly message for the given URLError to be used in an
261 LfsRemoteError message.
264 LfsRemoteError message.
262 """
265 """
263 inst = urlerror
266 inst = urlerror
264
267
265 if isinstance(urlerror.reason, Exception):
268 if isinstance(urlerror.reason, Exception):
266 inst = urlerror.reason
269 inst = urlerror.reason
267
270
268 if util.safehasattr(inst, b'reason'):
271 if util.safehasattr(inst, b'reason'):
269 try: # usually it is in the form (errno, strerror)
272 try: # usually it is in the form (errno, strerror)
270 reason = inst.reason.args[1]
273 reason = inst.reason.args[1]
271 except (AttributeError, IndexError):
274 except (AttributeError, IndexError):
272 # it might be anything, for example a string
275 # it might be anything, for example a string
273 reason = inst.reason
276 reason = inst.reason
274 if isinstance(reason, pycompat.unicode):
277 if isinstance(reason, pycompat.unicode):
275 # SSLError of Python 2.7.9 contains a unicode
278 # SSLError of Python 2.7.9 contains a unicode
276 reason = encoding.unitolocal(reason)
279 reason = encoding.unitolocal(reason)
277 return reason
280 return reason
278 elif getattr(inst, "strerror", None):
281 elif getattr(inst, "strerror", None):
279 return encoding.strtolocal(inst.strerror)
282 return encoding.strtolocal(inst.strerror)
280 else:
283 else:
281 return stringutil.forcebytestr(urlerror)
284 return stringutil.forcebytestr(urlerror)
282
285
283
286
284 class lfsauthhandler(util.urlreq.basehandler):
287 class lfsauthhandler(util.urlreq.basehandler):
285 handler_order = 480 # Before HTTPDigestAuthHandler (== 490)
288 handler_order = 480 # Before HTTPDigestAuthHandler (== 490)
286
289
287 def http_error_401(self, req, fp, code, msg, headers):
290 def http_error_401(self, req, fp, code, msg, headers):
288 """Enforces that any authentication performed is HTTP Basic
291 """Enforces that any authentication performed is HTTP Basic
289 Authentication. No authentication is also acceptable.
292 Authentication. No authentication is also acceptable.
290 """
293 """
291 authreq = headers.get('www-authenticate', None)
294 authreq = headers.get('www-authenticate', None)
292 if authreq:
295 if authreq:
293 scheme = authreq.split()[0]
296 scheme = authreq.split()[0]
294
297
295 if scheme.lower() != 'basic':
298 if scheme.lower() != 'basic':
296 msg = _(b'the server must support Basic Authentication')
299 msg = _(b'the server must support Basic Authentication')
297 raise util.urlerr.httperror(
300 raise util.urlerr.httperror(
298 req.get_full_url(),
301 req.get_full_url(),
299 code,
302 code,
300 encoding.strfromlocal(msg),
303 encoding.strfromlocal(msg),
301 headers,
304 headers,
302 fp,
305 fp,
303 )
306 )
304 return None
307 return None
305
308
306
309
307 class _gitlfsremote(object):
310 class _gitlfsremote(object):
308 def __init__(self, repo, url):
311 def __init__(self, repo, url):
309 ui = repo.ui
312 ui = repo.ui
310 self.ui = ui
313 self.ui = ui
311 baseurl, authinfo = url.authinfo()
314 baseurl, authinfo = url.authinfo()
312 self.baseurl = baseurl.rstrip(b'/')
315 self.baseurl = baseurl.rstrip(b'/')
313 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
316 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
314 if not useragent:
317 if not useragent:
315 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
318 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
316 self.urlopener = urlmod.opener(ui, authinfo, useragent)
319 self.urlopener = urlmod.opener(ui, authinfo, useragent)
317 self.urlopener.add_handler(lfsauthhandler())
320 self.urlopener.add_handler(lfsauthhandler())
318 self.retry = ui.configint(b'lfs', b'retry')
321 self.retry = ui.configint(b'lfs', b'retry')
319
322
320 def writebatch(self, pointers, fromstore):
323 def writebatch(self, pointers, fromstore):
321 """Batch upload from local to remote blobstore."""
324 """Batch upload from local to remote blobstore."""
322 self._batch(_deduplicate(pointers), fromstore, b'upload')
325 self._batch(_deduplicate(pointers), fromstore, b'upload')
323
326
324 def readbatch(self, pointers, tostore):
327 def readbatch(self, pointers, tostore):
325 """Batch download from remote to local blostore."""
328 """Batch download from remote to local blostore."""
326 self._batch(_deduplicate(pointers), tostore, b'download')
329 self._batch(_deduplicate(pointers), tostore, b'download')
327
330
328 def _batchrequest(self, pointers, action):
331 def _batchrequest(self, pointers, action):
329 """Get metadata about objects pointed by pointers for given action
332 """Get metadata about objects pointed by pointers for given action
330
333
331 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
334 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
332 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
335 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
333 """
336 """
334 objects = [
337 objects = [
335 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
338 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
336 for p in pointers
339 for p in pointers
337 ]
340 ]
338 requestdata = pycompat.bytesurl(
341 requestdata = pycompat.bytesurl(
339 json.dumps(
342 json.dumps(
340 {
343 {
341 'objects': objects,
344 'objects': objects,
342 'operation': pycompat.strurl(action),
345 'operation': pycompat.strurl(action),
343 }
346 }
344 )
347 )
345 )
348 )
346 url = b'%s/objects/batch' % self.baseurl
349 url = b'%s/objects/batch' % self.baseurl
347 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
350 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
348 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
351 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
349 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
352 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
350 try:
353 try:
351 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
354 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
352 rawjson = rsp.read()
355 rawjson = rsp.read()
353 except util.urlerr.httperror as ex:
356 except util.urlerr.httperror as ex:
354 hints = {
357 hints = {
355 400: _(
358 400: _(
356 b'check that lfs serving is enabled on %s and "%s" is '
359 b'check that lfs serving is enabled on %s and "%s" is '
357 b'supported'
360 b'supported'
358 )
361 )
359 % (self.baseurl, action),
362 % (self.baseurl, action),
360 404: _(b'the "lfs.url" config may be used to override %s')
363 404: _(b'the "lfs.url" config may be used to override %s')
361 % self.baseurl,
364 % self.baseurl,
362 }
365 }
363 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
366 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
364 raise LfsRemoteError(
367 raise LfsRemoteError(
365 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
368 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
366 hint=hint,
369 hint=hint,
367 )
370 )
368 except util.urlerr.urlerror as ex:
371 except util.urlerr.urlerror as ex:
369 hint = (
372 hint = (
370 _(b'the "lfs.url" config may be used to override %s')
373 _(b'the "lfs.url" config may be used to override %s')
371 % self.baseurl
374 % self.baseurl
372 )
375 )
373 raise LfsRemoteError(
376 raise LfsRemoteError(
374 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
377 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
375 )
378 )
376 try:
379 try:
377 response = pycompat.json_loads(rawjson)
380 response = pycompat.json_loads(rawjson)
378 except ValueError:
381 except ValueError:
379 raise LfsRemoteError(
382 raise LfsRemoteError(
380 _(b'LFS server returns invalid JSON: %s')
383 _(b'LFS server returns invalid JSON: %s')
381 % rawjson.encode("utf-8")
384 % rawjson.encode("utf-8")
382 )
385 )
383
386
384 if self.ui.debugflag:
387 if self.ui.debugflag:
385 self.ui.debug(b'Status: %d\n' % rsp.status)
388 self.ui.debug(b'Status: %d\n' % rsp.status)
386 # lfs-test-server and hg serve return headers in different order
389 # lfs-test-server and hg serve return headers in different order
387 headers = pycompat.bytestr(rsp.info()).strip()
390 headers = pycompat.bytestr(rsp.info()).strip()
388 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
391 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
389
392
390 if 'objects' in response:
393 if 'objects' in response:
391 response['objects'] = sorted(
394 response['objects'] = sorted(
392 response['objects'], key=lambda p: p['oid']
395 response['objects'], key=lambda p: p['oid']
393 )
396 )
394 self.ui.debug(
397 self.ui.debug(
395 b'%s\n'
398 b'%s\n'
396 % pycompat.bytesurl(
399 % pycompat.bytesurl(
397 json.dumps(
400 json.dumps(
398 response,
401 response,
399 indent=2,
402 indent=2,
400 separators=('', ': '),
403 separators=('', ': '),
401 sort_keys=True,
404 sort_keys=True,
402 )
405 )
403 )
406 )
404 )
407 )
405
408
406 def encodestr(x):
409 def encodestr(x):
407 if isinstance(x, pycompat.unicode):
410 if isinstance(x, pycompat.unicode):
408 return x.encode('utf-8')
411 return x.encode('utf-8')
409 return x
412 return x
410
413
411 return pycompat.rapply(encodestr, response)
414 return pycompat.rapply(encodestr, response)
412
415
413 def _checkforservererror(self, pointers, responses, action):
416 def _checkforservererror(self, pointers, responses, action):
414 """Scans errors from objects
417 """Scans errors from objects
415
418
416 Raises LfsRemoteError if any objects have an error"""
419 Raises LfsRemoteError if any objects have an error"""
417 for response in responses:
420 for response in responses:
418 # The server should return 404 when objects cannot be found. Some
421 # The server should return 404 when objects cannot be found. Some
419 # server implementation (ex. lfs-test-server) does not set "error"
422 # server implementation (ex. lfs-test-server) does not set "error"
420 # but just removes "download" from "actions". Treat that case
423 # but just removes "download" from "actions". Treat that case
421 # as the same as 404 error.
424 # as the same as 404 error.
422 if b'error' not in response:
425 if b'error' not in response:
423 if action == b'download' and action not in response.get(
426 if action == b'download' and action not in response.get(
424 b'actions', []
427 b'actions', []
425 ):
428 ):
426 code = 404
429 code = 404
427 else:
430 else:
428 continue
431 continue
429 else:
432 else:
430 # An error dict without a code doesn't make much sense, so
433 # An error dict without a code doesn't make much sense, so
431 # treat as a server error.
434 # treat as a server error.
432 code = response.get(b'error').get(b'code', 500)
435 code = response.get(b'error').get(b'code', 500)
433
436
434 ptrmap = {p.oid(): p for p in pointers}
437 ptrmap = {p.oid(): p for p in pointers}
435 p = ptrmap.get(response[b'oid'], None)
438 p = ptrmap.get(response[b'oid'], None)
436 if p:
439 if p:
437 filename = getattr(p, 'filename', b'unknown')
440 filename = getattr(p, 'filename', b'unknown')
438 errors = {
441 errors = {
439 404: b'The object does not exist',
442 404: b'The object does not exist',
440 410: b'The object was removed by the owner',
443 410: b'The object was removed by the owner',
441 422: b'Validation error',
444 422: b'Validation error',
442 500: b'Internal server error',
445 500: b'Internal server error',
443 }
446 }
444 msg = errors.get(code, b'status code %d' % code)
447 msg = errors.get(code, b'status code %d' % code)
445 raise LfsRemoteError(
448 raise LfsRemoteError(
446 _(b'LFS server error for "%s": %s') % (filename, msg)
449 _(b'LFS server error for "%s": %s') % (filename, msg)
447 )
450 )
448 else:
451 else:
449 raise LfsRemoteError(
452 raise LfsRemoteError(
450 _(b'LFS server error. Unsolicited response for oid %s')
453 _(b'LFS server error. Unsolicited response for oid %s')
451 % response[b'oid']
454 % response[b'oid']
452 )
455 )
453
456
454 def _extractobjects(self, response, pointers, action):
457 def _extractobjects(self, response, pointers, action):
455 """extract objects from response of the batch API
458 """extract objects from response of the batch API
456
459
457 response: parsed JSON object returned by batch API
460 response: parsed JSON object returned by batch API
458 return response['objects'] filtered by action
461 return response['objects'] filtered by action
459 raise if any object has an error
462 raise if any object has an error
460 """
463 """
461 # Scan errors from objects - fail early
464 # Scan errors from objects - fail early
462 objects = response.get(b'objects', [])
465 objects = response.get(b'objects', [])
463 self._checkforservererror(pointers, objects, action)
466 self._checkforservererror(pointers, objects, action)
464
467
465 # Filter objects with given action. Practically, this skips uploading
468 # Filter objects with given action. Practically, this skips uploading
466 # objects which exist in the server.
469 # objects which exist in the server.
467 filteredobjects = [
470 filteredobjects = [
468 o for o in objects if action in o.get(b'actions', [])
471 o for o in objects if action in o.get(b'actions', [])
469 ]
472 ]
470
473
471 return filteredobjects
474 return filteredobjects
472
475
473 def _basictransfer(self, obj, action, localstore):
476 def _basictransfer(self, obj, action, localstore):
474 """Download or upload a single object using basic transfer protocol
477 """Download or upload a single object using basic transfer protocol
475
478
476 obj: dict, an object description returned by batch API
479 obj: dict, an object description returned by batch API
477 action: string, one of ['upload', 'download']
480 action: string, one of ['upload', 'download']
478 localstore: blobstore.local
481 localstore: blobstore.local
479
482
480 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
483 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
481 basic-transfers.md
484 basic-transfers.md
482 """
485 """
483 oid = obj[b'oid']
486 oid = obj[b'oid']
484 href = obj[b'actions'][action].get(b'href')
487 href = obj[b'actions'][action].get(b'href')
485 headers = obj[b'actions'][action].get(b'header', {}).items()
488 headers = obj[b'actions'][action].get(b'header', {}).items()
486
489
487 request = util.urlreq.request(pycompat.strurl(href))
490 request = util.urlreq.request(pycompat.strurl(href))
488 if action == b'upload':
491 if action == b'upload':
489 # If uploading blobs, read data from local blobstore.
492 # If uploading blobs, read data from local blobstore.
490 if not localstore.verify(oid):
493 if not localstore.verify(oid):
491 raise error.Abort(
494 raise error.Abort(
492 _(b'detected corrupt lfs object: %s') % oid,
495 _(b'detected corrupt lfs object: %s') % oid,
493 hint=_(b'run hg verify'),
496 hint=_(b'run hg verify'),
494 )
497 )
495
498
496 for k, v in headers:
499 for k, v in headers:
497 request.add_header(pycompat.strurl(k), pycompat.strurl(v))
500 request.add_header(pycompat.strurl(k), pycompat.strurl(v))
498
501
499 try:
502 try:
500 if action == b'upload':
503 if action == b'upload':
501 request.data = lfsuploadfile(self.ui, localstore.path(oid))
504 request.data = lfsuploadfile(self.ui, localstore.path(oid))
502 request.get_method = lambda: 'PUT'
505 request.get_method = lambda: 'PUT'
503 request.add_header('Content-Type', 'application/octet-stream')
506 request.add_header('Content-Type', 'application/octet-stream')
504 request.add_header('Content-Length', request.data.length)
507 request.add_header('Content-Length', request.data.length)
505
508
506 with contextlib.closing(self.urlopener.open(request)) as res:
509 with contextlib.closing(self.urlopener.open(request)) as res:
507 contentlength = res.info().get(b"content-length")
510 contentlength = res.info().get(b"content-length")
508 ui = self.ui # Shorten debug lines
511 ui = self.ui # Shorten debug lines
509 if self.ui.debugflag:
512 if self.ui.debugflag:
510 ui.debug(b'Status: %d\n' % res.status)
513 ui.debug(b'Status: %d\n' % res.status)
511 # lfs-test-server and hg serve return headers in different
514 # lfs-test-server and hg serve return headers in different
512 # order
515 # order
513 headers = pycompat.bytestr(res.info()).strip()
516 headers = pycompat.bytestr(res.info()).strip()
514 ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
517 ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
515
518
516 if action == b'download':
519 if action == b'download':
517 # If downloading blobs, store downloaded data to local
520 # If downloading blobs, store downloaded data to local
518 # blobstore
521 # blobstore
519 localstore.download(oid, res, contentlength)
522 localstore.download(oid, res, contentlength)
520 else:
523 else:
521 blocks = []
524 blocks = []
522 while True:
525 while True:
523 data = res.read(1048576)
526 data = res.read(1048576)
524 if not data:
527 if not data:
525 break
528 break
526 blocks.append(data)
529 blocks.append(data)
527
530
528 response = b"".join(blocks)
531 response = b"".join(blocks)
529 if response:
532 if response:
530 ui.debug(b'lfs %s response: %s' % (action, response))
533 ui.debug(b'lfs %s response: %s' % (action, response))
531 except util.urlerr.httperror as ex:
534 except util.urlerr.httperror as ex:
532 if self.ui.debugflag:
535 if self.ui.debugflag:
533 self.ui.debug(
536 self.ui.debug(
534 b'%s: %s\n' % (oid, ex.read())
537 b'%s: %s\n' % (oid, ex.read())
535 ) # XXX: also bytes?
538 ) # XXX: also bytes?
536 raise LfsRemoteError(
539 raise LfsRemoteError(
537 _(b'LFS HTTP error: %s (oid=%s, action=%s)')
540 _(b'LFS HTTP error: %s (oid=%s, action=%s)')
538 % (stringutil.forcebytestr(ex), oid, action)
541 % (stringutil.forcebytestr(ex), oid, action)
539 )
542 )
540 except util.urlerr.urlerror as ex:
543 except util.urlerr.urlerror as ex:
541 hint = _(b'attempted connection to %s') % pycompat.bytesurl(
544 hint = _(b'attempted connection to %s') % pycompat.bytesurl(
542 util.urllibcompat.getfullurl(request)
545 util.urllibcompat.getfullurl(request)
543 )
546 )
544 raise LfsRemoteError(
547 raise LfsRemoteError(
545 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
548 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
546 )
549 )
547 finally:
550 finally:
548 if request.data:
551 if request.data:
549 request.data.close()
552 request.data.close()
550
553
551 def _batch(self, pointers, localstore, action):
554 def _batch(self, pointers, localstore, action):
552 if action not in [b'upload', b'download']:
555 if action not in [b'upload', b'download']:
553 raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)
556 raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)
554
557
555 response = self._batchrequest(pointers, action)
558 response = self._batchrequest(pointers, action)
556 objects = self._extractobjects(response, pointers, action)
559 objects = self._extractobjects(response, pointers, action)
557 total = sum(x.get(b'size', 0) for x in objects)
560 total = sum(x.get(b'size', 0) for x in objects)
558 sizes = {}
561 sizes = {}
559 for obj in objects:
562 for obj in objects:
560 sizes[obj.get(b'oid')] = obj.get(b'size', 0)
563 sizes[obj.get(b'oid')] = obj.get(b'size', 0)
561 topic = {
564 topic = {
562 b'upload': _(b'lfs uploading'),
565 b'upload': _(b'lfs uploading'),
563 b'download': _(b'lfs downloading'),
566 b'download': _(b'lfs downloading'),
564 }[action]
567 }[action]
565 if len(objects) > 1:
568 if len(objects) > 1:
566 self.ui.note(
569 self.ui.note(
567 _(b'lfs: need to transfer %d objects (%s)\n')
570 _(b'lfs: need to transfer %d objects (%s)\n')
568 % (len(objects), util.bytecount(total))
571 % (len(objects), util.bytecount(total))
569 )
572 )
570
573
571 def transfer(chunk):
574 def transfer(chunk):
572 for obj in chunk:
575 for obj in chunk:
573 objsize = obj.get(b'size', 0)
576 objsize = obj.get(b'size', 0)
574 if self.ui.verbose:
577 if self.ui.verbose:
575 if action == b'download':
578 if action == b'download':
576 msg = _(b'lfs: downloading %s (%s)\n')
579 msg = _(b'lfs: downloading %s (%s)\n')
577 elif action == b'upload':
580 elif action == b'upload':
578 msg = _(b'lfs: uploading %s (%s)\n')
581 msg = _(b'lfs: uploading %s (%s)\n')
579 self.ui.note(
582 self.ui.note(
580 msg % (obj.get(b'oid'), util.bytecount(objsize))
583 msg % (obj.get(b'oid'), util.bytecount(objsize))
581 )
584 )
582 retry = self.retry
585 retry = self.retry
583 while True:
586 while True:
584 try:
587 try:
585 self._basictransfer(obj, action, localstore)
588 self._basictransfer(obj, action, localstore)
586 yield 1, obj.get(b'oid')
589 yield 1, obj.get(b'oid')
587 break
590 break
588 except socket.error as ex:
591 except socket.error as ex:
589 if retry > 0:
592 if retry > 0:
590 self.ui.note(
593 self.ui.note(
591 _(b'lfs: failed: %r (remaining retry %d)\n')
594 _(b'lfs: failed: %r (remaining retry %d)\n')
592 % (stringutil.forcebytestr(ex), retry)
595 % (stringutil.forcebytestr(ex), retry)
593 )
596 )
594 retry -= 1
597 retry -= 1
595 continue
598 continue
596 raise
599 raise
597
600
598 # Until https multiplexing gets sorted out
601 # Until https multiplexing gets sorted out
599 if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
602 if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
600 oids = worker.worker(
603 oids = worker.worker(
601 self.ui,
604 self.ui,
602 0.1,
605 0.1,
603 transfer,
606 transfer,
604 (),
607 (),
605 sorted(objects, key=lambda o: o.get(b'oid')),
608 sorted(objects, key=lambda o: o.get(b'oid')),
606 )
609 )
607 else:
610 else:
608 oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
611 oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
609
612
610 with self.ui.makeprogress(
613 with self.ui.makeprogress(
611 topic, unit=_(b"bytes"), total=total
614 topic, unit=_(b"bytes"), total=total
612 ) as progress:
615 ) as progress:
613 progress.update(0)
616 progress.update(0)
614 processed = 0
617 processed = 0
615 blobs = 0
618 blobs = 0
616 for _one, oid in oids:
619 for _one, oid in oids:
617 processed += sizes[oid]
620 processed += sizes[oid]
618 blobs += 1
621 blobs += 1
619 progress.update(processed)
622 progress.update(processed)
620 self.ui.note(_(b'lfs: processed: %s\n') % oid)
623 self.ui.note(_(b'lfs: processed: %s\n') % oid)
621
624
622 if blobs > 0:
625 if blobs > 0:
623 if action == b'upload':
626 if action == b'upload':
624 self.ui.status(
627 self.ui.status(
625 _(b'lfs: uploaded %d files (%s)\n')
628 _(b'lfs: uploaded %d files (%s)\n')
626 % (blobs, util.bytecount(processed))
629 % (blobs, util.bytecount(processed))
627 )
630 )
628 elif action == b'download':
631 elif action == b'download':
629 self.ui.status(
632 self.ui.status(
630 _(b'lfs: downloaded %d files (%s)\n')
633 _(b'lfs: downloaded %d files (%s)\n')
631 % (blobs, util.bytecount(processed))
634 % (blobs, util.bytecount(processed))
632 )
635 )
633
636
634 def __del__(self):
637 def __del__(self):
635 # copied from mercurial/httppeer.py
638 # copied from mercurial/httppeer.py
636 urlopener = getattr(self, 'urlopener', None)
639 urlopener = getattr(self, 'urlopener', None)
637 if urlopener:
640 if urlopener:
638 for h in urlopener.handlers:
641 for h in urlopener.handlers:
639 h.close()
642 h.close()
640 getattr(h, "close_all", lambda: None)()
643 getattr(h, "close_all", lambda: None)()
641
644
642
645
643 class _dummyremote(object):
646 class _dummyremote(object):
644 """Dummy store storing blobs to temp directory."""
647 """Dummy store storing blobs to temp directory."""
645
648
646 def __init__(self, repo, url):
649 def __init__(self, repo, url):
647 fullpath = repo.vfs.join(b'lfs', url.path)
650 fullpath = repo.vfs.join(b'lfs', url.path)
648 self.vfs = lfsvfs(fullpath)
651 self.vfs = lfsvfs(fullpath)
649
652
650 def writebatch(self, pointers, fromstore):
653 def writebatch(self, pointers, fromstore):
651 for p in _deduplicate(pointers):
654 for p in _deduplicate(pointers):
652 content = fromstore.read(p.oid(), verify=True)
655 content = fromstore.read(p.oid(), verify=True)
653 with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
656 with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
654 fp.write(content)
657 fp.write(content)
655
658
656 def readbatch(self, pointers, tostore):
659 def readbatch(self, pointers, tostore):
657 for p in _deduplicate(pointers):
660 for p in _deduplicate(pointers):
658 with self.vfs(p.oid(), b'rb') as fp:
661 with self.vfs(p.oid(), b'rb') as fp:
659 tostore.download(p.oid(), fp, None)
662 tostore.download(p.oid(), fp, None)
660
663
661
664
662 class _nullremote(object):
665 class _nullremote(object):
663 """Null store storing blobs to /dev/null."""
666 """Null store storing blobs to /dev/null."""
664
667
665 def __init__(self, repo, url):
668 def __init__(self, repo, url):
666 pass
669 pass
667
670
668 def writebatch(self, pointers, fromstore):
671 def writebatch(self, pointers, fromstore):
669 pass
672 pass
670
673
671 def readbatch(self, pointers, tostore):
674 def readbatch(self, pointers, tostore):
672 pass
675 pass
673
676
674
677
675 class _promptremote(object):
678 class _promptremote(object):
676 """Prompt user to set lfs.url when accessed."""
679 """Prompt user to set lfs.url when accessed."""
677
680
678 def __init__(self, repo, url):
681 def __init__(self, repo, url):
679 pass
682 pass
680
683
681 def writebatch(self, pointers, fromstore, ui=None):
684 def writebatch(self, pointers, fromstore, ui=None):
682 self._prompt()
685 self._prompt()
683
686
684 def readbatch(self, pointers, tostore, ui=None):
687 def readbatch(self, pointers, tostore, ui=None):
685 self._prompt()
688 self._prompt()
686
689
687 def _prompt(self):
690 def _prompt(self):
688 raise error.Abort(_(b'lfs.url needs to be configured'))
691 raise error.Abort(_(b'lfs.url needs to be configured'))
689
692
690
693
691 _storemap = {
694 _storemap = {
692 b'https': _gitlfsremote,
695 b'https': _gitlfsremote,
693 b'http': _gitlfsremote,
696 b'http': _gitlfsremote,
694 b'file': _dummyremote,
697 b'file': _dummyremote,
695 b'null': _nullremote,
698 b'null': _nullremote,
696 None: _promptremote,
699 None: _promptremote,
697 }
700 }
698
701
699
702
700 def _deduplicate(pointers):
703 def _deduplicate(pointers):
701 """Remove any duplicate oids that exist in the list"""
704 """Remove any duplicate oids that exist in the list"""
702 reduced = util.sortdict()
705 reduced = util.sortdict()
703 for p in pointers:
706 for p in pointers:
704 reduced[p.oid()] = p
707 reduced[p.oid()] = p
705 return reduced.values()
708 return reduced.values()
706
709
707
710
708 def _verify(oid, content):
711 def _verify(oid, content):
709 realoid = hex(hashlib.sha256(content).digest())
712 realoid = hex(hashlib.sha256(content).digest())
710 if realoid != oid:
713 if realoid != oid:
711 raise LfsCorruptionError(
714 raise LfsCorruptionError(
712 _(b'detected corrupt lfs object: %s') % oid,
715 _(b'detected corrupt lfs object: %s') % oid,
713 hint=_(b'run hg verify'),
716 hint=_(b'run hg verify'),
714 )
717 )
715
718
716
719
717 def remote(repo, remote=None):
720 def remote(repo, remote=None):
718 """remotestore factory. return a store in _storemap depending on config
721 """remotestore factory. return a store in _storemap depending on config
719
722
720 If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
723 If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
721 infer the endpoint, based on the remote repository using the same path
724 infer the endpoint, based on the remote repository using the same path
722 adjustments as git. As an extension, 'http' is supported as well so that
725 adjustments as git. As an extension, 'http' is supported as well so that
723 ``hg serve`` works out of the box.
726 ``hg serve`` works out of the box.
724
727
725 https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
728 https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
726 """
729 """
727 lfsurl = repo.ui.config(b'lfs', b'url')
730 lfsurl = repo.ui.config(b'lfs', b'url')
728 url = util.url(lfsurl or b'')
731 url = urlutil.url(lfsurl or b'')
729 if lfsurl is None:
732 if lfsurl is None:
730 if remote:
733 if remote:
731 path = remote
734 path = remote
732 elif util.safehasattr(repo, b'_subtoppath'):
735 elif util.safehasattr(repo, b'_subtoppath'):
733 # The pull command sets this during the optional update phase, which
736 # The pull command sets this during the optional update phase, which
734 # tells exactly where the pull originated, whether 'paths.default'
737 # tells exactly where the pull originated, whether 'paths.default'
735 # or explicit.
738 # or explicit.
736 path = repo._subtoppath
739 path = repo._subtoppath
737 else:
740 else:
738 # TODO: investigate 'paths.remote:lfsurl' style path customization,
741 # TODO: investigate 'paths.remote:lfsurl' style path customization,
739 # and fall back to inferring from 'paths.remote' if unspecified.
742 # and fall back to inferring from 'paths.remote' if unspecified.
740 path = repo.ui.config(b'paths', b'default') or b''
743 path = repo.ui.config(b'paths', b'default') or b''
741
744
742 defaulturl = util.url(path)
745 defaulturl = urlutil.url(path)
743
746
744 # TODO: support local paths as well.
747 # TODO: support local paths as well.
745 # TODO: consider the ssh -> https transformation that git applies
748 # TODO: consider the ssh -> https transformation that git applies
746 if defaulturl.scheme in (b'http', b'https'):
749 if defaulturl.scheme in (b'http', b'https'):
747 if defaulturl.path and defaulturl.path[:-1] != b'/':
750 if defaulturl.path and defaulturl.path[:-1] != b'/':
748 defaulturl.path += b'/'
751 defaulturl.path += b'/'
749 defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
752 defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
750
753
751 url = util.url(bytes(defaulturl))
754 url = urlutil.url(bytes(defaulturl))
752 repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
755 repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
753
756
754 scheme = url.scheme
757 scheme = url.scheme
755 if scheme not in _storemap:
758 if scheme not in _storemap:
756 raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
759 raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
757 return _storemap[scheme](repo, url)
760 return _storemap[scheme](repo, url)
758
761
759
762
760 class LfsRemoteError(error.StorageError):
763 class LfsRemoteError(error.StorageError):
761 pass
764 pass
762
765
763
766
764 class LfsCorruptionError(error.Abort):
767 class LfsCorruptionError(error.Abort):
765 """Raised when a corrupt blob is detected, aborting an operation
768 """Raised when a corrupt blob is detected, aborting an operation
766
769
767 It exists to allow specialized handling on the server side."""
770 It exists to allow specialized handling on the server side."""
@@ -1,4313 +1,4314 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help COMMAND` for more details)::
17 Common tasks (use :hg:`help COMMAND` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import, print_function
65 from __future__ import absolute_import, print_function
66
66
67 import errno
67 import errno
68 import os
68 import os
69 import re
69 import re
70 import shutil
70 import shutil
71 import sys
71 import sys
72 from mercurial.i18n import _
72 from mercurial.i18n import _
73 from mercurial.node import (
73 from mercurial.node import (
74 bin,
74 bin,
75 hex,
75 hex,
76 nullid,
76 nullid,
77 nullrev,
77 nullrev,
78 short,
78 short,
79 )
79 )
80 from mercurial.pycompat import (
80 from mercurial.pycompat import (
81 delattr,
81 delattr,
82 getattr,
82 getattr,
83 open,
83 open,
84 )
84 )
85 from mercurial import (
85 from mercurial import (
86 cmdutil,
86 cmdutil,
87 commands,
87 commands,
88 dirstateguard,
88 dirstateguard,
89 encoding,
89 encoding,
90 error,
90 error,
91 extensions,
91 extensions,
92 hg,
92 hg,
93 localrepo,
93 localrepo,
94 lock as lockmod,
94 lock as lockmod,
95 logcmdutil,
95 logcmdutil,
96 patch as patchmod,
96 patch as patchmod,
97 phases,
97 phases,
98 pycompat,
98 pycompat,
99 registrar,
99 registrar,
100 revsetlang,
100 revsetlang,
101 scmutil,
101 scmutil,
102 smartset,
102 smartset,
103 strip,
103 strip,
104 subrepoutil,
104 subrepoutil,
105 util,
105 util,
106 vfs as vfsmod,
106 vfs as vfsmod,
107 )
107 )
108 from mercurial.utils import (
108 from mercurial.utils import (
109 dateutil,
109 dateutil,
110 stringutil,
110 stringutil,
111 urlutil,
111 )
112 )
112
113
113 release = lockmod.release
114 release = lockmod.release
114 seriesopts = [(b's', b'summary', None, _(b'print first line of patch header'))]
115 seriesopts = [(b's', b'summary', None, _(b'print first line of patch header'))]
115
116
116 cmdtable = {}
117 cmdtable = {}
117 command = registrar.command(cmdtable)
118 command = registrar.command(cmdtable)
118 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
119 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
119 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
120 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
120 # be specifying the version(s) of Mercurial they are tested with, or
121 # be specifying the version(s) of Mercurial they are tested with, or
121 # leave the attribute unspecified.
122 # leave the attribute unspecified.
122 testedwith = b'ships-with-hg-core'
123 testedwith = b'ships-with-hg-core'
123
124
124 configtable = {}
125 configtable = {}
125 configitem = registrar.configitem(configtable)
126 configitem = registrar.configitem(configtable)
126
127
127 configitem(
128 configitem(
128 b'mq',
129 b'mq',
129 b'git',
130 b'git',
130 default=b'auto',
131 default=b'auto',
131 )
132 )
132 configitem(
133 configitem(
133 b'mq',
134 b'mq',
134 b'keepchanges',
135 b'keepchanges',
135 default=False,
136 default=False,
136 )
137 )
137 configitem(
138 configitem(
138 b'mq',
139 b'mq',
139 b'plain',
140 b'plain',
140 default=False,
141 default=False,
141 )
142 )
142 configitem(
143 configitem(
143 b'mq',
144 b'mq',
144 b'secret',
145 b'secret',
145 default=False,
146 default=False,
146 )
147 )
147
148
148 # force load strip extension formerly included in mq and import some utility
149 # force load strip extension formerly included in mq and import some utility
149 try:
150 try:
150 extensions.find(b'strip')
151 extensions.find(b'strip')
151 except KeyError:
152 except KeyError:
152 # note: load is lazy so we could avoid the try-except,
153 # note: load is lazy so we could avoid the try-except,
153 # but I (marmoute) prefer this explicit code.
154 # but I (marmoute) prefer this explicit code.
154 class dummyui(object):
155 class dummyui(object):
155 def debug(self, msg):
156 def debug(self, msg):
156 pass
157 pass
157
158
158 def log(self, event, msgfmt, *msgargs, **opts):
159 def log(self, event, msgfmt, *msgargs, **opts):
159 pass
160 pass
160
161
161 extensions.load(dummyui(), b'strip', b'')
162 extensions.load(dummyui(), b'strip', b'')
162
163
163 strip = strip.strip
164 strip = strip.strip
164
165
165
166
166 def checksubstate(repo, baserev=None):
167 def checksubstate(repo, baserev=None):
167 """return list of subrepos at a different revision than substate.
168 """return list of subrepos at a different revision than substate.
168 Abort if any subrepos have uncommitted changes."""
169 Abort if any subrepos have uncommitted changes."""
169 inclsubs = []
170 inclsubs = []
170 wctx = repo[None]
171 wctx = repo[None]
171 if baserev:
172 if baserev:
172 bctx = repo[baserev]
173 bctx = repo[baserev]
173 else:
174 else:
174 bctx = wctx.p1()
175 bctx = wctx.p1()
175 for s in sorted(wctx.substate):
176 for s in sorted(wctx.substate):
176 wctx.sub(s).bailifchanged(True)
177 wctx.sub(s).bailifchanged(True)
177 if s not in bctx.substate or bctx.sub(s).dirty():
178 if s not in bctx.substate or bctx.sub(s).dirty():
178 inclsubs.append(s)
179 inclsubs.append(s)
179 return inclsubs
180 return inclsubs
180
181
181
182
182 # Patch names looks like unix-file names.
183 # Patch names looks like unix-file names.
183 # They must be joinable with queue directory and result in the patch path.
184 # They must be joinable with queue directory and result in the patch path.
184 normname = util.normpath
185 normname = util.normpath
185
186
186
187
187 class statusentry(object):
188 class statusentry(object):
188 def __init__(self, node, name):
189 def __init__(self, node, name):
189 self.node, self.name = node, name
190 self.node, self.name = node, name
190
191
191 def __bytes__(self):
192 def __bytes__(self):
192 return hex(self.node) + b':' + self.name
193 return hex(self.node) + b':' + self.name
193
194
194 __str__ = encoding.strmethod(__bytes__)
195 __str__ = encoding.strmethod(__bytes__)
195 __repr__ = encoding.strmethod(__bytes__)
196 __repr__ = encoding.strmethod(__bytes__)
196
197
197
198
198 # The order of the headers in 'hg export' HG patches:
199 # The order of the headers in 'hg export' HG patches:
199 HGHEADERS = [
200 HGHEADERS = [
200 # '# HG changeset patch',
201 # '# HG changeset patch',
201 b'# User ',
202 b'# User ',
202 b'# Date ',
203 b'# Date ',
203 b'# ',
204 b'# ',
204 b'# Branch ',
205 b'# Branch ',
205 b'# Node ID ',
206 b'# Node ID ',
206 b'# Parent ', # can occur twice for merges - but that is not relevant for mq
207 b'# Parent ', # can occur twice for merges - but that is not relevant for mq
207 ]
208 ]
208 # The order of headers in plain 'mail style' patches:
209 # The order of headers in plain 'mail style' patches:
209 PLAINHEADERS = {
210 PLAINHEADERS = {
210 b'from': 0,
211 b'from': 0,
211 b'date': 1,
212 b'date': 1,
212 b'subject': 2,
213 b'subject': 2,
213 }
214 }
214
215
215
216
216 def inserthgheader(lines, header, value):
217 def inserthgheader(lines, header, value):
217 """Assuming lines contains a HG patch header, add a header line with value.
218 """Assuming lines contains a HG patch header, add a header line with value.
218 >>> try: inserthgheader([], b'# Date ', b'z')
219 >>> try: inserthgheader([], b'# Date ', b'z')
219 ... except ValueError as inst: print("oops")
220 ... except ValueError as inst: print("oops")
220 oops
221 oops
221 >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
222 >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
222 ['# HG changeset patch', '# Date z']
223 ['# HG changeset patch', '# Date z']
223 >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
224 >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
224 ['# HG changeset patch', '# Date z', '']
225 ['# HG changeset patch', '# Date z', '']
225 >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
226 >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
226 ['# HG changeset patch', '# User y', '# Date z']
227 ['# HG changeset patch', '# User y', '# Date z']
227 >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
228 >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
228 ... b'# User ', b'z')
229 ... b'# User ', b'z')
229 ['# HG changeset patch', '# Date x', '# User z']
230 ['# HG changeset patch', '# Date x', '# User z']
230 >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
231 >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
231 ['# HG changeset patch', '# Date z']
232 ['# HG changeset patch', '# Date z']
232 >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
233 >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
233 ... b'# Date ', b'z')
234 ... b'# Date ', b'z')
234 ['# HG changeset patch', '# Date z', '', '# Date y']
235 ['# HG changeset patch', '# Date z', '', '# Date y']
235 >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
236 >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
236 ... b'# Date ', b'z')
237 ... b'# Date ', b'z')
237 ['# HG changeset patch', '# Date z', '# Parent y']
238 ['# HG changeset patch', '# Date z', '# Parent y']
238 """
239 """
239 start = lines.index(b'# HG changeset patch') + 1
240 start = lines.index(b'# HG changeset patch') + 1
240 newindex = HGHEADERS.index(header)
241 newindex = HGHEADERS.index(header)
241 bestpos = len(lines)
242 bestpos = len(lines)
242 for i in range(start, len(lines)):
243 for i in range(start, len(lines)):
243 line = lines[i]
244 line = lines[i]
244 if not line.startswith(b'# '):
245 if not line.startswith(b'# '):
245 bestpos = min(bestpos, i)
246 bestpos = min(bestpos, i)
246 break
247 break
247 for lineindex, h in enumerate(HGHEADERS):
248 for lineindex, h in enumerate(HGHEADERS):
248 if line.startswith(h):
249 if line.startswith(h):
249 if lineindex == newindex:
250 if lineindex == newindex:
250 lines[i] = header + value
251 lines[i] = header + value
251 return lines
252 return lines
252 if lineindex > newindex:
253 if lineindex > newindex:
253 bestpos = min(bestpos, i)
254 bestpos = min(bestpos, i)
254 break # next line
255 break # next line
255 lines.insert(bestpos, header + value)
256 lines.insert(bestpos, header + value)
256 return lines
257 return lines
257
258
258
259
259 def insertplainheader(lines, header, value):
260 def insertplainheader(lines, header, value):
260 """For lines containing a plain patch header, add a header line with value.
261 """For lines containing a plain patch header, add a header line with value.
261 >>> insertplainheader([], b'Date', b'z')
262 >>> insertplainheader([], b'Date', b'z')
262 ['Date: z']
263 ['Date: z']
263 >>> insertplainheader([b''], b'Date', b'z')
264 >>> insertplainheader([b''], b'Date', b'z')
264 ['Date: z', '']
265 ['Date: z', '']
265 >>> insertplainheader([b'x'], b'Date', b'z')
266 >>> insertplainheader([b'x'], b'Date', b'z')
266 ['Date: z', '', 'x']
267 ['Date: z', '', 'x']
267 >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
268 >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
268 ['From: y', 'Date: z', '', 'x']
269 ['From: y', 'Date: z', '', 'x']
269 >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
270 >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
270 [' date : x', 'From: z', '']
271 [' date : x', 'From: z', '']
271 >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
272 >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
272 ['Date: z', '', 'Date: y']
273 ['Date: z', '', 'Date: y']
273 >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
274 >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
274 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
275 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
275 """
276 """
276 newprio = PLAINHEADERS[header.lower()]
277 newprio = PLAINHEADERS[header.lower()]
277 bestpos = len(lines)
278 bestpos = len(lines)
278 for i, line in enumerate(lines):
279 for i, line in enumerate(lines):
279 if b':' in line:
280 if b':' in line:
280 lheader = line.split(b':', 1)[0].strip().lower()
281 lheader = line.split(b':', 1)[0].strip().lower()
281 lprio = PLAINHEADERS.get(lheader, newprio + 1)
282 lprio = PLAINHEADERS.get(lheader, newprio + 1)
282 if lprio == newprio:
283 if lprio == newprio:
283 lines[i] = b'%s: %s' % (header, value)
284 lines[i] = b'%s: %s' % (header, value)
284 return lines
285 return lines
285 if lprio > newprio and i < bestpos:
286 if lprio > newprio and i < bestpos:
286 bestpos = i
287 bestpos = i
287 else:
288 else:
288 if line:
289 if line:
289 lines.insert(i, b'')
290 lines.insert(i, b'')
290 if i < bestpos:
291 if i < bestpos:
291 bestpos = i
292 bestpos = i
292 break
293 break
293 lines.insert(bestpos, b'%s: %s' % (header, value))
294 lines.insert(bestpos, b'%s: %s' % (header, value))
294 return lines
295 return lines
295
296
296
297
297 class patchheader(object):
298 class patchheader(object):
298 def __init__(self, pf, plainmode=False):
299 def __init__(self, pf, plainmode=False):
299 def eatdiff(lines):
300 def eatdiff(lines):
300 while lines:
301 while lines:
301 l = lines[-1]
302 l = lines[-1]
302 if (
303 if (
303 l.startswith(b"diff -")
304 l.startswith(b"diff -")
304 or l.startswith(b"Index:")
305 or l.startswith(b"Index:")
305 or l.startswith(b"===========")
306 or l.startswith(b"===========")
306 ):
307 ):
307 del lines[-1]
308 del lines[-1]
308 else:
309 else:
309 break
310 break
310
311
311 def eatempty(lines):
312 def eatempty(lines):
312 while lines:
313 while lines:
313 if not lines[-1].strip():
314 if not lines[-1].strip():
314 del lines[-1]
315 del lines[-1]
315 else:
316 else:
316 break
317 break
317
318
318 message = []
319 message = []
319 comments = []
320 comments = []
320 user = None
321 user = None
321 date = None
322 date = None
322 parent = None
323 parent = None
323 format = None
324 format = None
324 subject = None
325 subject = None
325 branch = None
326 branch = None
326 nodeid = None
327 nodeid = None
327 diffstart = 0
328 diffstart = 0
328
329
329 for line in open(pf, b'rb'):
330 for line in open(pf, b'rb'):
330 line = line.rstrip()
331 line = line.rstrip()
331 if line.startswith(b'diff --git') or (
332 if line.startswith(b'diff --git') or (
332 diffstart and line.startswith(b'+++ ')
333 diffstart and line.startswith(b'+++ ')
333 ):
334 ):
334 diffstart = 2
335 diffstart = 2
335 break
336 break
336 diffstart = 0 # reset
337 diffstart = 0 # reset
337 if line.startswith(b"--- "):
338 if line.startswith(b"--- "):
338 diffstart = 1
339 diffstart = 1
339 continue
340 continue
340 elif format == b"hgpatch":
341 elif format == b"hgpatch":
341 # parse values when importing the result of an hg export
342 # parse values when importing the result of an hg export
342 if line.startswith(b"# User "):
343 if line.startswith(b"# User "):
343 user = line[7:]
344 user = line[7:]
344 elif line.startswith(b"# Date "):
345 elif line.startswith(b"# Date "):
345 date = line[7:]
346 date = line[7:]
346 elif line.startswith(b"# Parent "):
347 elif line.startswith(b"# Parent "):
347 parent = line[9:].lstrip() # handle double trailing space
348 parent = line[9:].lstrip() # handle double trailing space
348 elif line.startswith(b"# Branch "):
349 elif line.startswith(b"# Branch "):
349 branch = line[9:]
350 branch = line[9:]
350 elif line.startswith(b"# Node ID "):
351 elif line.startswith(b"# Node ID "):
351 nodeid = line[10:]
352 nodeid = line[10:]
352 elif not line.startswith(b"# ") and line:
353 elif not line.startswith(b"# ") and line:
353 message.append(line)
354 message.append(line)
354 format = None
355 format = None
355 elif line == b'# HG changeset patch':
356 elif line == b'# HG changeset patch':
356 message = []
357 message = []
357 format = b"hgpatch"
358 format = b"hgpatch"
358 elif format != b"tagdone" and (
359 elif format != b"tagdone" and (
359 line.startswith(b"Subject: ") or line.startswith(b"subject: ")
360 line.startswith(b"Subject: ") or line.startswith(b"subject: ")
360 ):
361 ):
361 subject = line[9:]
362 subject = line[9:]
362 format = b"tag"
363 format = b"tag"
363 elif format != b"tagdone" and (
364 elif format != b"tagdone" and (
364 line.startswith(b"From: ") or line.startswith(b"from: ")
365 line.startswith(b"From: ") or line.startswith(b"from: ")
365 ):
366 ):
366 user = line[6:]
367 user = line[6:]
367 format = b"tag"
368 format = b"tag"
368 elif format != b"tagdone" and (
369 elif format != b"tagdone" and (
369 line.startswith(b"Date: ") or line.startswith(b"date: ")
370 line.startswith(b"Date: ") or line.startswith(b"date: ")
370 ):
371 ):
371 date = line[6:]
372 date = line[6:]
372 format = b"tag"
373 format = b"tag"
373 elif format == b"tag" and line == b"":
374 elif format == b"tag" and line == b"":
374 # when looking for tags (subject: from: etc) they
375 # when looking for tags (subject: from: etc) they
375 # end once you find a blank line in the source
376 # end once you find a blank line in the source
376 format = b"tagdone"
377 format = b"tagdone"
377 elif message or line:
378 elif message or line:
378 message.append(line)
379 message.append(line)
379 comments.append(line)
380 comments.append(line)
380
381
381 eatdiff(message)
382 eatdiff(message)
382 eatdiff(comments)
383 eatdiff(comments)
383 # Remember the exact starting line of the patch diffs before consuming
384 # Remember the exact starting line of the patch diffs before consuming
384 # empty lines, for external use by TortoiseHg and others
385 # empty lines, for external use by TortoiseHg and others
385 self.diffstartline = len(comments)
386 self.diffstartline = len(comments)
386 eatempty(message)
387 eatempty(message)
387 eatempty(comments)
388 eatempty(comments)
388
389
389 # make sure message isn't empty
390 # make sure message isn't empty
390 if format and format.startswith(b"tag") and subject:
391 if format and format.startswith(b"tag") and subject:
391 message.insert(0, subject)
392 message.insert(0, subject)
392
393
393 self.message = message
394 self.message = message
394 self.comments = comments
395 self.comments = comments
395 self.user = user
396 self.user = user
396 self.date = date
397 self.date = date
397 self.parent = parent
398 self.parent = parent
398 # nodeid and branch are for external use by TortoiseHg and others
399 # nodeid and branch are for external use by TortoiseHg and others
399 self.nodeid = nodeid
400 self.nodeid = nodeid
400 self.branch = branch
401 self.branch = branch
401 self.haspatch = diffstart > 1
402 self.haspatch = diffstart > 1
402 self.plainmode = (
403 self.plainmode = (
403 plainmode
404 plainmode
404 or b'# HG changeset patch' not in self.comments
405 or b'# HG changeset patch' not in self.comments
405 and any(
406 and any(
406 c.startswith(b'Date: ') or c.startswith(b'From: ')
407 c.startswith(b'Date: ') or c.startswith(b'From: ')
407 for c in self.comments
408 for c in self.comments
408 )
409 )
409 )
410 )
410
411
411 def setuser(self, user):
412 def setuser(self, user):
412 try:
413 try:
413 inserthgheader(self.comments, b'# User ', user)
414 inserthgheader(self.comments, b'# User ', user)
414 except ValueError:
415 except ValueError:
415 if self.plainmode:
416 if self.plainmode:
416 insertplainheader(self.comments, b'From', user)
417 insertplainheader(self.comments, b'From', user)
417 else:
418 else:
418 tmp = [b'# HG changeset patch', b'# User ' + user]
419 tmp = [b'# HG changeset patch', b'# User ' + user]
419 self.comments = tmp + self.comments
420 self.comments = tmp + self.comments
420 self.user = user
421 self.user = user
421
422
422 def setdate(self, date):
423 def setdate(self, date):
423 try:
424 try:
424 inserthgheader(self.comments, b'# Date ', date)
425 inserthgheader(self.comments, b'# Date ', date)
425 except ValueError:
426 except ValueError:
426 if self.plainmode:
427 if self.plainmode:
427 insertplainheader(self.comments, b'Date', date)
428 insertplainheader(self.comments, b'Date', date)
428 else:
429 else:
429 tmp = [b'# HG changeset patch', b'# Date ' + date]
430 tmp = [b'# HG changeset patch', b'# Date ' + date]
430 self.comments = tmp + self.comments
431 self.comments = tmp + self.comments
431 self.date = date
432 self.date = date
432
433
433 def setparent(self, parent):
434 def setparent(self, parent):
434 try:
435 try:
435 inserthgheader(self.comments, b'# Parent ', parent)
436 inserthgheader(self.comments, b'# Parent ', parent)
436 except ValueError:
437 except ValueError:
437 if not self.plainmode:
438 if not self.plainmode:
438 tmp = [b'# HG changeset patch', b'# Parent ' + parent]
439 tmp = [b'# HG changeset patch', b'# Parent ' + parent]
439 self.comments = tmp + self.comments
440 self.comments = tmp + self.comments
440 self.parent = parent
441 self.parent = parent
441
442
442 def setmessage(self, message):
443 def setmessage(self, message):
443 if self.comments:
444 if self.comments:
444 self._delmsg()
445 self._delmsg()
445 self.message = [message]
446 self.message = [message]
446 if message:
447 if message:
447 if self.plainmode and self.comments and self.comments[-1]:
448 if self.plainmode and self.comments and self.comments[-1]:
448 self.comments.append(b'')
449 self.comments.append(b'')
449 self.comments.append(message)
450 self.comments.append(message)
450
451
451 def __bytes__(self):
452 def __bytes__(self):
452 s = b'\n'.join(self.comments).rstrip()
453 s = b'\n'.join(self.comments).rstrip()
453 if not s:
454 if not s:
454 return b''
455 return b''
455 return s + b'\n\n'
456 return s + b'\n\n'
456
457
457 __str__ = encoding.strmethod(__bytes__)
458 __str__ = encoding.strmethod(__bytes__)
458
459
459 def _delmsg(self):
460 def _delmsg(self):
460 """Remove existing message, keeping the rest of the comments fields.
461 """Remove existing message, keeping the rest of the comments fields.
461 If comments contains 'subject: ', message will prepend
462 If comments contains 'subject: ', message will prepend
462 the field and a blank line."""
463 the field and a blank line."""
463 if self.message:
464 if self.message:
464 subj = b'subject: ' + self.message[0].lower()
465 subj = b'subject: ' + self.message[0].lower()
465 for i in pycompat.xrange(len(self.comments)):
466 for i in pycompat.xrange(len(self.comments)):
466 if subj == self.comments[i].lower():
467 if subj == self.comments[i].lower():
467 del self.comments[i]
468 del self.comments[i]
468 self.message = self.message[2:]
469 self.message = self.message[2:]
469 break
470 break
470 ci = 0
471 ci = 0
471 for mi in self.message:
472 for mi in self.message:
472 while mi != self.comments[ci]:
473 while mi != self.comments[ci]:
473 ci += 1
474 ci += 1
474 del self.comments[ci]
475 del self.comments[ci]
475
476
476
477
477 def newcommit(repo, phase, *args, **kwargs):
478 def newcommit(repo, phase, *args, **kwargs):
478 """helper dedicated to ensure a commit respect mq.secret setting
479 """helper dedicated to ensure a commit respect mq.secret setting
479
480
480 It should be used instead of repo.commit inside the mq source for operation
481 It should be used instead of repo.commit inside the mq source for operation
481 creating new changeset.
482 creating new changeset.
482 """
483 """
483 repo = repo.unfiltered()
484 repo = repo.unfiltered()
484 if phase is None:
485 if phase is None:
485 if repo.ui.configbool(b'mq', b'secret'):
486 if repo.ui.configbool(b'mq', b'secret'):
486 phase = phases.secret
487 phase = phases.secret
487 overrides = {(b'ui', b'allowemptycommit'): True}
488 overrides = {(b'ui', b'allowemptycommit'): True}
488 if phase is not None:
489 if phase is not None:
489 overrides[(b'phases', b'new-commit')] = phase
490 overrides[(b'phases', b'new-commit')] = phase
490 with repo.ui.configoverride(overrides, b'mq'):
491 with repo.ui.configoverride(overrides, b'mq'):
491 repo.ui.setconfig(b'ui', b'allowemptycommit', True)
492 repo.ui.setconfig(b'ui', b'allowemptycommit', True)
492 return repo.commit(*args, **kwargs)
493 return repo.commit(*args, **kwargs)
493
494
494
495
495 class AbortNoCleanup(error.Abort):
496 class AbortNoCleanup(error.Abort):
496 pass
497 pass
497
498
498
499
499 class queue(object):
500 class queue(object):
500 def __init__(self, ui, baseui, path, patchdir=None):
501 def __init__(self, ui, baseui, path, patchdir=None):
501 self.basepath = path
502 self.basepath = path
502 try:
503 try:
503 with open(os.path.join(path, b'patches.queue'), 'rb') as fh:
504 with open(os.path.join(path, b'patches.queue'), 'rb') as fh:
504 cur = fh.read().rstrip()
505 cur = fh.read().rstrip()
505
506
506 if not cur:
507 if not cur:
507 curpath = os.path.join(path, b'patches')
508 curpath = os.path.join(path, b'patches')
508 else:
509 else:
509 curpath = os.path.join(path, b'patches-' + cur)
510 curpath = os.path.join(path, b'patches-' + cur)
510 except IOError:
511 except IOError:
511 curpath = os.path.join(path, b'patches')
512 curpath = os.path.join(path, b'patches')
512 self.path = patchdir or curpath
513 self.path = patchdir or curpath
513 self.opener = vfsmod.vfs(self.path)
514 self.opener = vfsmod.vfs(self.path)
514 self.ui = ui
515 self.ui = ui
515 self.baseui = baseui
516 self.baseui = baseui
516 self.applieddirty = False
517 self.applieddirty = False
517 self.seriesdirty = False
518 self.seriesdirty = False
518 self.added = []
519 self.added = []
519 self.seriespath = b"series"
520 self.seriespath = b"series"
520 self.statuspath = b"status"
521 self.statuspath = b"status"
521 self.guardspath = b"guards"
522 self.guardspath = b"guards"
522 self.activeguards = None
523 self.activeguards = None
523 self.guardsdirty = False
524 self.guardsdirty = False
524 # Handle mq.git as a bool with extended values
525 # Handle mq.git as a bool with extended values
525 gitmode = ui.config(b'mq', b'git').lower()
526 gitmode = ui.config(b'mq', b'git').lower()
526 boolmode = stringutil.parsebool(gitmode)
527 boolmode = stringutil.parsebool(gitmode)
527 if boolmode is not None:
528 if boolmode is not None:
528 if boolmode:
529 if boolmode:
529 gitmode = b'yes'
530 gitmode = b'yes'
530 else:
531 else:
531 gitmode = b'no'
532 gitmode = b'no'
532 self.gitmode = gitmode
533 self.gitmode = gitmode
533 # deprecated config: mq.plain
534 # deprecated config: mq.plain
534 self.plainmode = ui.configbool(b'mq', b'plain')
535 self.plainmode = ui.configbool(b'mq', b'plain')
535 self.checkapplied = True
536 self.checkapplied = True
536
537
537 @util.propertycache
538 @util.propertycache
538 def applied(self):
539 def applied(self):
539 def parselines(lines):
540 def parselines(lines):
540 for l in lines:
541 for l in lines:
541 entry = l.split(b':', 1)
542 entry = l.split(b':', 1)
542 if len(entry) > 1:
543 if len(entry) > 1:
543 n, name = entry
544 n, name = entry
544 yield statusentry(bin(n), name)
545 yield statusentry(bin(n), name)
545 elif l.strip():
546 elif l.strip():
546 self.ui.warn(
547 self.ui.warn(
547 _(b'malformated mq status line: %s\n')
548 _(b'malformated mq status line: %s\n')
548 % stringutil.pprint(entry)
549 % stringutil.pprint(entry)
549 )
550 )
550 # else we ignore empty lines
551 # else we ignore empty lines
551
552
552 try:
553 try:
553 lines = self.opener.read(self.statuspath).splitlines()
554 lines = self.opener.read(self.statuspath).splitlines()
554 return list(parselines(lines))
555 return list(parselines(lines))
555 except IOError as e:
556 except IOError as e:
556 if e.errno == errno.ENOENT:
557 if e.errno == errno.ENOENT:
557 return []
558 return []
558 raise
559 raise
559
560
560 @util.propertycache
561 @util.propertycache
561 def fullseries(self):
562 def fullseries(self):
562 try:
563 try:
563 return self.opener.read(self.seriespath).splitlines()
564 return self.opener.read(self.seriespath).splitlines()
564 except IOError as e:
565 except IOError as e:
565 if e.errno == errno.ENOENT:
566 if e.errno == errno.ENOENT:
566 return []
567 return []
567 raise
568 raise
568
569
569 @util.propertycache
570 @util.propertycache
570 def series(self):
571 def series(self):
571 self.parseseries()
572 self.parseseries()
572 return self.series
573 return self.series
573
574
574 @util.propertycache
575 @util.propertycache
575 def seriesguards(self):
576 def seriesguards(self):
576 self.parseseries()
577 self.parseseries()
577 return self.seriesguards
578 return self.seriesguards
578
579
579 def invalidate(self):
580 def invalidate(self):
580 for a in 'applied fullseries series seriesguards'.split():
581 for a in 'applied fullseries series seriesguards'.split():
581 if a in self.__dict__:
582 if a in self.__dict__:
582 delattr(self, a)
583 delattr(self, a)
583 self.applieddirty = False
584 self.applieddirty = False
584 self.seriesdirty = False
585 self.seriesdirty = False
585 self.guardsdirty = False
586 self.guardsdirty = False
586 self.activeguards = None
587 self.activeguards = None
587
588
588 def diffopts(self, opts=None, patchfn=None, plain=False):
589 def diffopts(self, opts=None, patchfn=None, plain=False):
589 """Return diff options tweaked for this mq use, possibly upgrading to
590 """Return diff options tweaked for this mq use, possibly upgrading to
590 git format, and possibly plain and without lossy options."""
591 git format, and possibly plain and without lossy options."""
591 diffopts = patchmod.difffeatureopts(
592 diffopts = patchmod.difffeatureopts(
592 self.ui,
593 self.ui,
593 opts,
594 opts,
594 git=True,
595 git=True,
595 whitespace=not plain,
596 whitespace=not plain,
596 formatchanging=not plain,
597 formatchanging=not plain,
597 )
598 )
598 if self.gitmode == b'auto':
599 if self.gitmode == b'auto':
599 diffopts.upgrade = True
600 diffopts.upgrade = True
600 elif self.gitmode == b'keep':
601 elif self.gitmode == b'keep':
601 pass
602 pass
602 elif self.gitmode in (b'yes', b'no'):
603 elif self.gitmode in (b'yes', b'no'):
603 diffopts.git = self.gitmode == b'yes'
604 diffopts.git = self.gitmode == b'yes'
604 else:
605 else:
605 raise error.Abort(
606 raise error.Abort(
606 _(b'mq.git option can be auto/keep/yes/no got %s')
607 _(b'mq.git option can be auto/keep/yes/no got %s')
607 % self.gitmode
608 % self.gitmode
608 )
609 )
609 if patchfn:
610 if patchfn:
610 diffopts = self.patchopts(diffopts, patchfn)
611 diffopts = self.patchopts(diffopts, patchfn)
611 return diffopts
612 return diffopts
612
613
613 def patchopts(self, diffopts, *patches):
614 def patchopts(self, diffopts, *patches):
614 """Return a copy of input diff options with git set to true if
615 """Return a copy of input diff options with git set to true if
615 referenced patch is a git patch and should be preserved as such.
616 referenced patch is a git patch and should be preserved as such.
616 """
617 """
617 diffopts = diffopts.copy()
618 diffopts = diffopts.copy()
618 if not diffopts.git and self.gitmode == b'keep':
619 if not diffopts.git and self.gitmode == b'keep':
619 for patchfn in patches:
620 for patchfn in patches:
620 patchf = self.opener(patchfn, b'r')
621 patchf = self.opener(patchfn, b'r')
621 # if the patch was a git patch, refresh it as a git patch
622 # if the patch was a git patch, refresh it as a git patch
622 diffopts.git = any(
623 diffopts.git = any(
623 line.startswith(b'diff --git') for line in patchf
624 line.startswith(b'diff --git') for line in patchf
624 )
625 )
625 patchf.close()
626 patchf.close()
626 return diffopts
627 return diffopts
627
628
628 def join(self, *p):
629 def join(self, *p):
629 return os.path.join(self.path, *p)
630 return os.path.join(self.path, *p)
630
631
631 def findseries(self, patch):
632 def findseries(self, patch):
632 def matchpatch(l):
633 def matchpatch(l):
633 l = l.split(b'#', 1)[0]
634 l = l.split(b'#', 1)[0]
634 return l.strip() == patch
635 return l.strip() == patch
635
636
636 for index, l in enumerate(self.fullseries):
637 for index, l in enumerate(self.fullseries):
637 if matchpatch(l):
638 if matchpatch(l):
638 return index
639 return index
639 return None
640 return None
640
641
641 guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
642 guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
642
643
643 def parseseries(self):
644 def parseseries(self):
644 self.series = []
645 self.series = []
645 self.seriesguards = []
646 self.seriesguards = []
646 for l in self.fullseries:
647 for l in self.fullseries:
647 h = l.find(b'#')
648 h = l.find(b'#')
648 if h == -1:
649 if h == -1:
649 patch = l
650 patch = l
650 comment = b''
651 comment = b''
651 elif h == 0:
652 elif h == 0:
652 continue
653 continue
653 else:
654 else:
654 patch = l[:h]
655 patch = l[:h]
655 comment = l[h:]
656 comment = l[h:]
656 patch = patch.strip()
657 patch = patch.strip()
657 if patch:
658 if patch:
658 if patch in self.series:
659 if patch in self.series:
659 raise error.Abort(
660 raise error.Abort(
660 _(b'%s appears more than once in %s')
661 _(b'%s appears more than once in %s')
661 % (patch, self.join(self.seriespath))
662 % (patch, self.join(self.seriespath))
662 )
663 )
663 self.series.append(patch)
664 self.series.append(patch)
664 self.seriesguards.append(self.guard_re.findall(comment))
665 self.seriesguards.append(self.guard_re.findall(comment))
665
666
666 def checkguard(self, guard):
667 def checkguard(self, guard):
667 if not guard:
668 if not guard:
668 return _(b'guard cannot be an empty string')
669 return _(b'guard cannot be an empty string')
669 bad_chars = b'# \t\r\n\f'
670 bad_chars = b'# \t\r\n\f'
670 first = guard[0]
671 first = guard[0]
671 if first in b'-+':
672 if first in b'-+':
672 return _(b'guard %r starts with invalid character: %r') % (
673 return _(b'guard %r starts with invalid character: %r') % (
673 guard,
674 guard,
674 first,
675 first,
675 )
676 )
676 for c in bad_chars:
677 for c in bad_chars:
677 if c in guard:
678 if c in guard:
678 return _(b'invalid character in guard %r: %r') % (guard, c)
679 return _(b'invalid character in guard %r: %r') % (guard, c)
679
680
680 def setactive(self, guards):
681 def setactive(self, guards):
681 for guard in guards:
682 for guard in guards:
682 bad = self.checkguard(guard)
683 bad = self.checkguard(guard)
683 if bad:
684 if bad:
684 raise error.Abort(bad)
685 raise error.Abort(bad)
685 guards = sorted(set(guards))
686 guards = sorted(set(guards))
686 self.ui.debug(b'active guards: %s\n' % b' '.join(guards))
687 self.ui.debug(b'active guards: %s\n' % b' '.join(guards))
687 self.activeguards = guards
688 self.activeguards = guards
688 self.guardsdirty = True
689 self.guardsdirty = True
689
690
690 def active(self):
691 def active(self):
691 if self.activeguards is None:
692 if self.activeguards is None:
692 self.activeguards = []
693 self.activeguards = []
693 try:
694 try:
694 guards = self.opener.read(self.guardspath).split()
695 guards = self.opener.read(self.guardspath).split()
695 except IOError as err:
696 except IOError as err:
696 if err.errno != errno.ENOENT:
697 if err.errno != errno.ENOENT:
697 raise
698 raise
698 guards = []
699 guards = []
699 for i, guard in enumerate(guards):
700 for i, guard in enumerate(guards):
700 bad = self.checkguard(guard)
701 bad = self.checkguard(guard)
701 if bad:
702 if bad:
702 self.ui.warn(
703 self.ui.warn(
703 b'%s:%d: %s\n'
704 b'%s:%d: %s\n'
704 % (self.join(self.guardspath), i + 1, bad)
705 % (self.join(self.guardspath), i + 1, bad)
705 )
706 )
706 else:
707 else:
707 self.activeguards.append(guard)
708 self.activeguards.append(guard)
708 return self.activeguards
709 return self.activeguards
709
710
710 def setguards(self, idx, guards):
711 def setguards(self, idx, guards):
711 for g in guards:
712 for g in guards:
712 if len(g) < 2:
713 if len(g) < 2:
713 raise error.Abort(_(b'guard %r too short') % g)
714 raise error.Abort(_(b'guard %r too short') % g)
714 if g[0] not in b'-+':
715 if g[0] not in b'-+':
715 raise error.Abort(_(b'guard %r starts with invalid char') % g)
716 raise error.Abort(_(b'guard %r starts with invalid char') % g)
716 bad = self.checkguard(g[1:])
717 bad = self.checkguard(g[1:])
717 if bad:
718 if bad:
718 raise error.Abort(bad)
719 raise error.Abort(bad)
719 drop = self.guard_re.sub(b'', self.fullseries[idx])
720 drop = self.guard_re.sub(b'', self.fullseries[idx])
720 self.fullseries[idx] = drop + b''.join([b' #' + g for g in guards])
721 self.fullseries[idx] = drop + b''.join([b' #' + g for g in guards])
721 self.parseseries()
722 self.parseseries()
722 self.seriesdirty = True
723 self.seriesdirty = True
723
724
724 def pushable(self, idx):
725 def pushable(self, idx):
725 if isinstance(idx, bytes):
726 if isinstance(idx, bytes):
726 idx = self.series.index(idx)
727 idx = self.series.index(idx)
727 patchguards = self.seriesguards[idx]
728 patchguards = self.seriesguards[idx]
728 if not patchguards:
729 if not patchguards:
729 return True, None
730 return True, None
730 guards = self.active()
731 guards = self.active()
731 exactneg = [
732 exactneg = [
732 g for g in patchguards if g.startswith(b'-') and g[1:] in guards
733 g for g in patchguards if g.startswith(b'-') and g[1:] in guards
733 ]
734 ]
734 if exactneg:
735 if exactneg:
735 return False, stringutil.pprint(exactneg[0])
736 return False, stringutil.pprint(exactneg[0])
736 pos = [g for g in patchguards if g.startswith(b'+')]
737 pos = [g for g in patchguards if g.startswith(b'+')]
737 exactpos = [g for g in pos if g[1:] in guards]
738 exactpos = [g for g in pos if g[1:] in guards]
738 if pos:
739 if pos:
739 if exactpos:
740 if exactpos:
740 return True, stringutil.pprint(exactpos[0])
741 return True, stringutil.pprint(exactpos[0])
741 return False, b' '.join([stringutil.pprint(p) for p in pos])
742 return False, b' '.join([stringutil.pprint(p) for p in pos])
742 return True, b''
743 return True, b''
743
744
    def explainpushable(self, idx, all_patches=False):
        """Print why the patch at *idx* (series index or name) is
        allowed or skipped under the currently active guards.

        With ``all_patches=True`` the messages go to regular output and
        allowed patches are reported too; otherwise only skipped patches
        are explained, as warnings, and only in verbose mode.
        """
        if all_patches:
            write = self.ui.write
        else:
            write = self.ui.warn

        if all_patches or self.ui.verbose:
            if isinstance(idx, bytes):
                # accept a patch name as well as a series index
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    # the patch carries no guards at all
                    write(
                        _(b'allowing %s - no guards in effect\n')
                        % self.series[idx]
                    )
                else:
                    if not why:
                        # guarded, but no negative guard matched
                        write(
                            _(b'allowing %s - no matching negative guards\n')
                            % self.series[idx]
                        )
                    else:
                        write(
                            _(b'allowing %s - guarded by %s\n')
                            % (self.series[idx], why)
                        )
            if not pushable:
                if why:
                    write(
                        _(b'skipping %s - guarded by %s\n')
                        % (self.series[idx], why)
                    )
                else:
                    write(
                        _(b'skipping %s - no matching guards\n')
                        % self.series[idx]
                    )
782
783
    def savedirty(self):
        """Flush modified in-memory queue state back to disk.

        Writes the applied-patch status, the full series and the active
        guards when their respective dirty flags are set, and schedules
        newly created patch files for addition to the patch repository
        (if one exists).
        """
        def writelist(items, path):
            # one item per line, under the queue directory
            fp = self.opener(path, b'wb')
            for i in items:
                fp.write(b"%s\n" % i)
            fp.close()

        if self.applieddirty:
            writelist(map(bytes, self.applied), self.statuspath)
            self.applieddirty = False
        if self.seriesdirty:
            writelist(self.fullseries, self.seriespath)
            self.seriesdirty = False
        if self.guardsdirty:
            writelist(self.activeguards, self.guardspath)
            self.guardsdirty = False
        if self.added:
            qrepo = self.qrepo()
            if qrepo:
                # only add files the patch repo does not already track
                qrepo[None].add(f for f in self.added if f not in qrepo[None])
            self.added = []
805 def removeundo(self, repo):
806 def removeundo(self, repo):
806 undo = repo.sjoin(b'undo')
807 undo = repo.sjoin(b'undo')
807 if not os.path.exists(undo):
808 if not os.path.exists(undo):
808 return
809 return
809 try:
810 try:
810 os.unlink(undo)
811 os.unlink(undo)
811 except OSError as inst:
812 except OSError as inst:
812 self.ui.warn(
813 self.ui.warn(
813 _(b'error removing undo: %s\n') % stringutil.forcebytestr(inst)
814 _(b'error removing undo: %s\n') % stringutil.forcebytestr(inst)
814 )
815 )
815
816
    def backup(self, repo, files, copy=False):
        """Save the working-directory copy of each of *files* to its
        scmutil backup path before a forced operation touches them.

        With ``copy=True`` the original file is kept in place; otherwise
        it is renamed away.
        """
        # backup local changes in --force case
        for f in sorted(files):
            absf = repo.wjoin(f)
            if os.path.lexists(absf):
                absorig = scmutil.backuppath(self.ui, repo, f)
                self.ui.note(
                    _(b'saving current version of %s as %s\n')
                    % (f, os.path.relpath(absorig))
                )

                if copy:
                    util.copyfile(absf, absorig)
                else:
                    util.rename(absf, absorig)
831
832
832 def printdiff(
833 def printdiff(
833 self,
834 self,
834 repo,
835 repo,
835 diffopts,
836 diffopts,
836 node1,
837 node1,
837 node2=None,
838 node2=None,
838 files=None,
839 files=None,
839 fp=None,
840 fp=None,
840 changes=None,
841 changes=None,
841 opts=None,
842 opts=None,
842 ):
843 ):
843 if opts is None:
844 if opts is None:
844 opts = {}
845 opts = {}
845 stat = opts.get(b'stat')
846 stat = opts.get(b'stat')
846 m = scmutil.match(repo[node1], files, opts)
847 m = scmutil.match(repo[node1], files, opts)
847 logcmdutil.diffordiffstat(
848 logcmdutil.diffordiffstat(
848 self.ui,
849 self.ui,
849 repo,
850 repo,
850 diffopts,
851 diffopts,
851 repo[node1],
852 repo[node1],
852 repo[node2],
853 repo[node2],
853 m,
854 m,
854 changes,
855 changes,
855 stat,
856 stat,
856 fp,
857 fp,
857 )
858 )
858
859
    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        """Bring one patch from *mergeq* into this queue, merging with
        revision *rev* when a plain apply does not succeed.

        Returns ``(err, node)``.  On the merge path the failed apply is
        stripped, *rev* is merged in, and the patch file is rewritten
        with the diff of the merge result against *head*.
        """
        # first try just applying the patch
        (err, n) = self.apply(
            repo, [patch], update_status=False, strict=True, merge=rev
        )

        if err == 0:
            return (err, n)

        if n is None:
            raise error.Abort(_(b"apply failed for patch %s") % patch)

        self.ui.warn(_(b"patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        strip(self.ui, repo, [n], update=False, backup=False)

        ctx = repo[rev]
        ret = hg.merge(ctx, remind=False)
        if ret:
            raise error.Abort(_(b"update returned %d") % ret)
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise error.Abort(_(b"repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise error.Abort(_(b"unable to read %s") % patch)

        # regenerate the patch file: original header plus the merged diff
        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, b"w")
        comments = bytes(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
898
899
899 def qparents(self, repo, rev=None):
900 def qparents(self, repo, rev=None):
900 """return the mq handled parent or p1
901 """return the mq handled parent or p1
901
902
902 In some case where mq get himself in being the parent of a merge the
903 In some case where mq get himself in being the parent of a merge the
903 appropriate parent may be p2.
904 appropriate parent may be p2.
904 (eg: an in progress merge started with mq disabled)
905 (eg: an in progress merge started with mq disabled)
905
906
906 If no parent are managed by mq, p1 is returned.
907 If no parent are managed by mq, p1 is returned.
907 """
908 """
908 if rev is None:
909 if rev is None:
909 (p1, p2) = repo.dirstate.parents()
910 (p1, p2) = repo.dirstate.parents()
910 if p2 == nullid:
911 if p2 == nullid:
911 return p1
912 return p1
912 if not self.applied:
913 if not self.applied:
913 return None
914 return None
914 return self.applied[-1].node
915 return self.applied[-1].node
915 p1, p2 = repo.changelog.parents(rev)
916 p1, p2 = repo.changelog.parents(rev)
916 if p2 != nullid and p2 in [x.node for x in self.applied]:
917 if p2 != nullid and p2 in [x.node for x in self.applied]:
917 return p2
918 return p2
918 return p1
919 return p1
919
920
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Pull each patch of *series* from *mergeq* into this queue via
        :meth:`mergeone`, inserting a single-parent merge marker first.

        Returns ``(err, head)``; stops at the first patch that is
        missing from or unapplied in *mergeq*, or that fails to merge.
        """
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = b".hg.patches.merge.marker"
            n = newcommit(repo, None, b'[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_(b"patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                # guarded patches are skipped, with an explanation
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_(b"patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
958
959
959 def patch(self, repo, patchfile):
960 def patch(self, repo, patchfile):
960 """Apply patchfile to the working directory.
961 """Apply patchfile to the working directory.
961 patchfile: name of patch file"""
962 patchfile: name of patch file"""
962 files = set()
963 files = set()
963 try:
964 try:
964 fuzz = patchmod.patch(
965 fuzz = patchmod.patch(
965 self.ui, repo, patchfile, strip=1, files=files, eolmode=None
966 self.ui, repo, patchfile, strip=1, files=files, eolmode=None
966 )
967 )
967 return (True, list(files), fuzz)
968 return (True, list(files), fuzz)
968 except Exception as inst:
969 except Exception as inst:
969 self.ui.note(stringutil.forcebytestr(inst) + b'\n')
970 self.ui.note(stringutil.forcebytestr(inst) + b'\n')
970 if not self.ui.verbose:
971 if not self.ui.verbose:
971 self.ui.warn(_(b"patch failed, unable to continue (try -v)\n"))
972 self.ui.warn(_(b"patch failed, unable to continue (try -v)\n"))
972 self.ui.traceback()
973 self.ui.traceback()
973 return (False, list(files), False)
974 return (False, list(files), False)
974
975
    def apply(
        self,
        repo,
        series,
        list=False,
        update_status=True,
        strict=False,
        patchdir=None,
        merge=None,
        all_files=None,
        tobackup=None,
        keepchanges=False,
    ):
        """Apply *series* of patches inside a ``qpush`` transaction.

        Locking/transaction wrapper around ``_apply``: takes the working
        copy and store locks, commits the transaction and flushes dirty
        queue state on success, rolls back and invalidates the in-memory
        state on failure.  Returns the ``(error, hash)`` pair produced
        by ``_apply``.
        """
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction(b"qpush")
            try:
                ret = self._apply(
                    repo,
                    series,
                    list,
                    update_status,
                    strict,
                    patchdir,
                    merge,
                    all_files=all_files,
                    tobackup=tobackup,
                    keepchanges=keepchanges,
                )
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # deliberately keep what was applied so far: commit the
                # transaction and persist state before propagating
                tr.close()
                self.savedirty()
                raise
            except:  # re-raises
                try:
                    tr.abort()
                finally:
                    # in-memory state no longer matches the store
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
1022
1023
    def _apply(
        self,
        repo,
        series,
        list=False,
        update_status=True,
        strict=False,
        patchdir=None,
        merge=None,
        all_files=None,
        tobackup=None,
        keepchanges=False,
    ):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.

        Applies each patch of *series* in turn (skipping guarded ones),
        committing one changeset per patch; *hash* is the node of the
        last commit made.  Caller is expected to hold the locks and the
        transaction (see :meth:`apply`).
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_(b"applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_(b"unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = b"imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append(b"\nimported patch %s" % patchname)
                message = b'\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # preserve local versions of the files the patch is
                    # about to modify
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _(b"conflicting local changes found"),
                            hint=_(b"did you forget to qrefresh?"),
                        )
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_(b"patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                with repo.dirstate.parentchange():
                    for f in removed:
                        repo.dirstate.remove(f)
                    for f in merged:
                        repo.dirstate.merge(f)
                    p1 = repo.dirstate.p1()
                    repo.setparents(p1, merge)

            if all_files and b'.hgsubstate' in all_files:
                # the patch touched subrepo state: merge it in as well
                wctx = repo[None]
                pctx = repo[b'.']
                overwrite = False
                mergedsubstate = subrepoutil.submerge(
                    repo, pctx, wctx, wctx, overwrite
                )
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo.changelog.tip()
            n = newcommit(
                repo, None, message, ph.user, ph.date, match=match, force=True
            )
            if repo.changelog.tip() == oldtip:
                # the commit was a no-op: the patch duplicates a child
                raise error.Abort(
                    _(b"qpush exactly duplicates child changeset")
                )
            if n is None:
                raise error.Abort(_(b"repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(
                    _(b"patch failed, rejects left in working directory\n")
                )
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_(b"fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
1144
1145
    def _cleanup(self, patches, numrevs, keep=False):
        """Forget *patches* from the series (deleting the patch files
        unless ``keep`` is set) and drop the first *numrevs* applied
        entries.

        Returns the nodes of the applied entries that were removed.
        """
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError as inst:
                    # a missing patch file is fine; anything else is not
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        sortedseries = []
        for p in patches:
            idx = self.findseries(p)
            if idx is None:
                # not present in the series file at all
                sortedseries.append((-1, p))
            else:
                sortedseries.append((idx, p))

        # delete from the end so earlier indexes stay valid
        sortedseries.sort(reverse=True)
        for (i, p) in sortedseries:
            if i != -1:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                rev = {entry.name: entry.node for entry in qfinished}
                for p in unknown:
                    msg = _(b'revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _(b'unknown patches: %s\n')
                raise error.Abort(b''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
1193
1194
    def _revpatches(self, repo, revs):
        """Map *revs* (ascending) to the names of the applied patches
        they correspond to.

        Validates that each revision is mq-managed and matches the
        applied stack in order, aborting otherwise.
        """
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                # below the bottom of the applied stack
                raise error.Abort(_(b'revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _(b'cannot delete revision %d above applied patches')
                raise error.Abort(msg % rev)

            patch = self.applied[i].name
            for fmt in (b'[mq]: %s', b'imported patch %s'):
                if ctx.description() == fmt % patch:
                    # the changeset still carries a default mq message
                    msg = _(b'patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches
1217
1218
    def finish(self, repo, revs):
        """Move the applied patches in *revs* out of mq control,
        advancing their phase when the ``mq.secret`` option is set."""
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool(b'mq', b'secret'):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = phases.newcommitphase(repo.ui)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                with repo.transaction(b'qfinish') as tr:
                    phases.advanceboundary(repo, tr, tphase, qfinished)
1231
1232
    def delete(self, repo, patches, opts):
        """Remove *patches* (named, and/or selected via ``opts['rev']``)
        from the series.

        Named patches must be unapplied; applied patches can only be
        removed through the rev form.  ``opts['keep']`` preserves the
        patch files on disk.
        """
        if not patches and not opts.get(b'rev'):
            raise error.Abort(
                _(b'qdelete requires at least one revision or patch name')
            )

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise error.Abort(_(b"cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise error.Abort(_(b"patch %s not in series file") % patch)
            if patch not in realpatches:
                # avoid deleting the same patch twice
                realpatches.append(patch)

        numrevs = 0
        if opts.get(b'rev'):
            if not self.applied:
                raise error.Abort(_(b'no patches applied'))
            revs = scmutil.revrange(repo, opts.get(b'rev'))
            revs.sort()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get(b'keep'))
1260
1261
1261 def checktoppatch(self, repo):
1262 def checktoppatch(self, repo):
1262 '''check that working directory is at qtip'''
1263 '''check that working directory is at qtip'''
1263 if self.applied:
1264 if self.applied:
1264 top = self.applied[-1].node
1265 top = self.applied[-1].node
1265 patch = self.applied[-1].name
1266 patch = self.applied[-1].name
1266 if repo.dirstate.p1() != top:
1267 if repo.dirstate.p1() != top:
1267 raise error.Abort(_(b"working directory revision is not qtip"))
1268 raise error.Abort(_(b"working directory revision is not qtip"))
1268 return top, patch
1269 return top, patch
1269 return None, None
1270 return None, None
1270
1271
1271 def putsubstate2changes(self, substatestate, changes):
1272 def putsubstate2changes(self, substatestate, changes):
1272 if isinstance(changes, list):
1273 if isinstance(changes, list):
1273 mar = changes[:3]
1274 mar = changes[:3]
1274 else:
1275 else:
1275 mar = (changes.modified, changes.added, changes.removed)
1276 mar = (changes.modified, changes.added, changes.removed)
1276 if any((b'.hgsubstate' in files for files in mar)):
1277 if any((b'.hgsubstate' in files for files in mar)):
1277 return # already listed up
1278 return # already listed up
1278 # not yet listed up
1279 # not yet listed up
1279 if substatestate in b'a?':
1280 if substatestate in b'a?':
1280 mar[1].append(b'.hgsubstate')
1281 mar[1].append(b'.hgsubstate')
1281 elif substatestate in b'r':
1282 elif substatestate in b'r':
1282 mar[2].append(b'.hgsubstate')
1283 mar[2].append(b'.hgsubstate')
1283 else: # modified
1284 else: # modified
1284 mar[0].append(b'.hgsubstate')
1285 mar[0].append(b'.hgsubstate')
1285
1286
    def checklocalchanges(self, repo, force=False, refresh=True):
        """Abort when uncommitted local (or subrepo) changes are
        present; return the repository status.

        *refresh* only affects the wording of the abort message (it adds
        a ``qrefresh`` hint).  With ``force=True`` only the
        unfinished-operation check (merge excluded) is performed.
        """
        excsuffix = b''
        if refresh:
            excsuffix = b', qrefresh first'
            # plain versions for i18n tool to detect them
            _(b"local changes found, qrefresh first")
            _(b"local changed subrepos found, qrefresh first")

        s = repo.status()
        if not force:
            cmdutil.checkunfinished(repo)
            if s.modified or s.added or s.removed or s.deleted:
                _(b"local changes found")  # i18n tool detection
                raise error.Abort(_(b"local changes found" + excsuffix))
            if checksubstate(repo):
                _(b"local changed subrepos found")  # i18n tool detection
                raise error.Abort(
                    _(b"local changed subrepos found" + excsuffix)
                )
        else:
            cmdutil.checkunfinished(repo, skipmerge=True)
        return s
1308
1309
    # Names that can never be used for a patch: they collide with mq's
    # own control files or with path components.
    _reserved = (b'series', b'status', b'guards', b'.', b'..')
1310
1311
1311 def checkreservedname(self, name):
1312 def checkreservedname(self, name):
1312 if name in self._reserved:
1313 if name in self._reserved:
1313 raise error.Abort(
1314 raise error.Abort(
1314 _(b'"%s" cannot be used as the name of a patch') % name
1315 _(b'"%s" cannot be used as the name of a patch') % name
1315 )
1316 )
1316 if name != name.strip():
1317 if name != name.strip():
1317 # whitespace is stripped by parseseries()
1318 # whitespace is stripped by parseseries()
1318 raise error.Abort(
1319 raise error.Abort(
1319 _(b'patch name cannot begin or end with whitespace')
1320 _(b'patch name cannot begin or end with whitespace')
1320 )
1321 )
1321 for prefix in (b'.hg', b'.mq'):
1322 for prefix in (b'.hg', b'.mq'):
1322 if name.startswith(prefix):
1323 if name.startswith(prefix):
1323 raise error.Abort(
1324 raise error.Abort(
1324 _(b'patch name cannot begin with "%s"') % prefix
1325 _(b'patch name cannot begin with "%s"') % prefix
1325 )
1326 )
1326 for c in (b'#', b':', b'\r', b'\n'):
1327 for c in (b'#', b':', b'\r', b'\n'):
1327 if c in name:
1328 if c in name:
1328 raise error.Abort(
1329 raise error.Abort(
1329 _(b'%r cannot be used in the name of a patch')
1330 _(b'%r cannot be used in the name of a patch')
1330 % pycompat.bytestr(c)
1331 % pycompat.bytestr(c)
1331 )
1332 )
1332
1333
1333 def checkpatchname(self, name, force=False):
1334 def checkpatchname(self, name, force=False):
1334 self.checkreservedname(name)
1335 self.checkreservedname(name)
1335 if not force and os.path.exists(self.join(name)):
1336 if not force and os.path.exists(self.join(name)):
1336 if os.path.isdir(self.join(name)):
1337 if os.path.isdir(self.join(name)):
1337 raise error.Abort(
1338 raise error.Abort(
1338 _(b'"%s" already exists as a directory') % name
1339 _(b'"%s" already exists as a directory') % name
1339 )
1340 )
1340 else:
1341 else:
1341 raise error.Abort(_(b'patch "%s" already exists') % name)
1342 raise error.Abort(_(b'patch "%s" already exists') % name)
1342
1343
1343 def makepatchname(self, title, fallbackname):
1344 def makepatchname(self, title, fallbackname):
1344 """Return a suitable filename for title, adding a suffix to make
1345 """Return a suitable filename for title, adding a suffix to make
1345 it unique in the existing list"""
1346 it unique in the existing list"""
1346 namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_')
1347 namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_')
1347 namebase = namebase[:75] # avoid too long name (issue5117)
1348 namebase = namebase[:75] # avoid too long name (issue5117)
1348 if namebase:
1349 if namebase:
1349 try:
1350 try:
1350 self.checkreservedname(namebase)
1351 self.checkreservedname(namebase)
1351 except error.Abort:
1352 except error.Abort:
1352 namebase = fallbackname
1353 namebase = fallbackname
1353 else:
1354 else:
1354 namebase = fallbackname
1355 namebase = fallbackname
1355 name = namebase
1356 name = namebase
1356 i = 0
1357 i = 0
1357 while True:
1358 while True:
1358 if name not in self.fullseries:
1359 if name not in self.fullseries:
1359 try:
1360 try:
1360 self.checkpatchname(name)
1361 self.checkpatchname(name)
1361 break
1362 break
1362 except error.Abort:
1363 except error.Abort:
1363 pass
1364 pass
1364 i += 1
1365 i += 1
1365 name = b'%s__%d' % (namebase, i)
1366 name = b'%s__%d' % (namebase, i)
1366 return name
1367 return name
1367
1368
1368 def checkkeepchanges(self, keepchanges, force):
1369 def checkkeepchanges(self, keepchanges, force):
1369 if force and keepchanges:
1370 if force and keepchanges:
1370 raise error.Abort(_(b'cannot use both --force and --keep-changes'))
1371 raise error.Abort(_(b'cannot use both --force and --keep-changes'))
1371
1372
1372 def new(self, repo, patchfn, *pats, **opts):
1373 def new(self, repo, patchfn, *pats, **opts):
1373 """options:
1374 """options:
1374 msg: a string or a no-argument function returning a string
1375 msg: a string or a no-argument function returning a string
1375 """
1376 """
1376 opts = pycompat.byteskwargs(opts)
1377 opts = pycompat.byteskwargs(opts)
1377 msg = opts.get(b'msg')
1378 msg = opts.get(b'msg')
1378 edit = opts.get(b'edit')
1379 edit = opts.get(b'edit')
1379 editform = opts.get(b'editform', b'mq.qnew')
1380 editform = opts.get(b'editform', b'mq.qnew')
1380 user = opts.get(b'user')
1381 user = opts.get(b'user')
1381 date = opts.get(b'date')
1382 date = opts.get(b'date')
1382 if date:
1383 if date:
1383 date = dateutil.parsedate(date)
1384 date = dateutil.parsedate(date)
1384 diffopts = self.diffopts({b'git': opts.get(b'git')}, plain=True)
1385 diffopts = self.diffopts({b'git': opts.get(b'git')}, plain=True)
1385 if opts.get(b'checkname', True):
1386 if opts.get(b'checkname', True):
1386 self.checkpatchname(patchfn)
1387 self.checkpatchname(patchfn)
1387 inclsubs = checksubstate(repo)
1388 inclsubs = checksubstate(repo)
1388 if inclsubs:
1389 if inclsubs:
1389 substatestate = repo.dirstate[b'.hgsubstate']
1390 substatestate = repo.dirstate[b'.hgsubstate']
1390 if opts.get(b'include') or opts.get(b'exclude') or pats:
1391 if opts.get(b'include') or opts.get(b'exclude') or pats:
1391 # detect missing files in pats
1392 # detect missing files in pats
1392 def badfn(f, msg):
1393 def badfn(f, msg):
1393 if f != b'.hgsubstate': # .hgsubstate is auto-created
1394 if f != b'.hgsubstate': # .hgsubstate is auto-created
1394 raise error.Abort(b'%s: %s' % (f, msg))
1395 raise error.Abort(b'%s: %s' % (f, msg))
1395
1396
1396 match = scmutil.match(repo[None], pats, opts, badfn=badfn)
1397 match = scmutil.match(repo[None], pats, opts, badfn=badfn)
1397 changes = repo.status(match=match)
1398 changes = repo.status(match=match)
1398 else:
1399 else:
1399 changes = self.checklocalchanges(repo, force=True)
1400 changes = self.checklocalchanges(repo, force=True)
1400 commitfiles = list(inclsubs)
1401 commitfiles = list(inclsubs)
1401 commitfiles.extend(changes.modified)
1402 commitfiles.extend(changes.modified)
1402 commitfiles.extend(changes.added)
1403 commitfiles.extend(changes.added)
1403 commitfiles.extend(changes.removed)
1404 commitfiles.extend(changes.removed)
1404 match = scmutil.matchfiles(repo, commitfiles)
1405 match = scmutil.matchfiles(repo, commitfiles)
1405 if len(repo[None].parents()) > 1:
1406 if len(repo[None].parents()) > 1:
1406 raise error.Abort(_(b'cannot manage merge changesets'))
1407 raise error.Abort(_(b'cannot manage merge changesets'))
1407 self.checktoppatch(repo)
1408 self.checktoppatch(repo)
1408 insert = self.fullseriesend()
1409 insert = self.fullseriesend()
1409 with repo.wlock():
1410 with repo.wlock():
1410 try:
1411 try:
1411 # if patch file write fails, abort early
1412 # if patch file write fails, abort early
1412 p = self.opener(patchfn, b"w")
1413 p = self.opener(patchfn, b"w")
1413 except IOError as e:
1414 except IOError as e:
1414 raise error.Abort(
1415 raise error.Abort(
1415 _(b'cannot write patch "%s": %s')
1416 _(b'cannot write patch "%s": %s')
1416 % (patchfn, encoding.strtolocal(e.strerror))
1417 % (patchfn, encoding.strtolocal(e.strerror))
1417 )
1418 )
1418 try:
1419 try:
1419 defaultmsg = b"[mq]: %s" % patchfn
1420 defaultmsg = b"[mq]: %s" % patchfn
1420 editor = cmdutil.getcommiteditor(editform=editform)
1421 editor = cmdutil.getcommiteditor(editform=editform)
1421 if edit:
1422 if edit:
1422
1423
1423 def finishdesc(desc):
1424 def finishdesc(desc):
1424 if desc.rstrip():
1425 if desc.rstrip():
1425 return desc
1426 return desc
1426 else:
1427 else:
1427 return defaultmsg
1428 return defaultmsg
1428
1429
1429 # i18n: this message is shown in editor with "HG: " prefix
1430 # i18n: this message is shown in editor with "HG: " prefix
1430 extramsg = _(b'Leave message empty to use default message.')
1431 extramsg = _(b'Leave message empty to use default message.')
1431 editor = cmdutil.getcommiteditor(
1432 editor = cmdutil.getcommiteditor(
1432 finishdesc=finishdesc,
1433 finishdesc=finishdesc,
1433 extramsg=extramsg,
1434 extramsg=extramsg,
1434 editform=editform,
1435 editform=editform,
1435 )
1436 )
1436 commitmsg = msg
1437 commitmsg = msg
1437 else:
1438 else:
1438 commitmsg = msg or defaultmsg
1439 commitmsg = msg or defaultmsg
1439
1440
1440 n = newcommit(
1441 n = newcommit(
1441 repo,
1442 repo,
1442 None,
1443 None,
1443 commitmsg,
1444 commitmsg,
1444 user,
1445 user,
1445 date,
1446 date,
1446 match=match,
1447 match=match,
1447 force=True,
1448 force=True,
1448 editor=editor,
1449 editor=editor,
1449 )
1450 )
1450 if n is None:
1451 if n is None:
1451 raise error.Abort(_(b"repo commit failed"))
1452 raise error.Abort(_(b"repo commit failed"))
1452 try:
1453 try:
1453 self.fullseries[insert:insert] = [patchfn]
1454 self.fullseries[insert:insert] = [patchfn]
1454 self.applied.append(statusentry(n, patchfn))
1455 self.applied.append(statusentry(n, patchfn))
1455 self.parseseries()
1456 self.parseseries()
1456 self.seriesdirty = True
1457 self.seriesdirty = True
1457 self.applieddirty = True
1458 self.applieddirty = True
1458 nctx = repo[n]
1459 nctx = repo[n]
1459 ph = patchheader(self.join(patchfn), self.plainmode)
1460 ph = patchheader(self.join(patchfn), self.plainmode)
1460 if user:
1461 if user:
1461 ph.setuser(user)
1462 ph.setuser(user)
1462 if date:
1463 if date:
1463 ph.setdate(b'%d %d' % date)
1464 ph.setdate(b'%d %d' % date)
1464 ph.setparent(hex(nctx.p1().node()))
1465 ph.setparent(hex(nctx.p1().node()))
1465 msg = nctx.description().strip()
1466 msg = nctx.description().strip()
1466 if msg == defaultmsg.strip():
1467 if msg == defaultmsg.strip():
1467 msg = b''
1468 msg = b''
1468 ph.setmessage(msg)
1469 ph.setmessage(msg)
1469 p.write(bytes(ph))
1470 p.write(bytes(ph))
1470 if commitfiles:
1471 if commitfiles:
1471 parent = self.qparents(repo, n)
1472 parent = self.qparents(repo, n)
1472 if inclsubs:
1473 if inclsubs:
1473 self.putsubstate2changes(substatestate, changes)
1474 self.putsubstate2changes(substatestate, changes)
1474 chunks = patchmod.diff(
1475 chunks = patchmod.diff(
1475 repo,
1476 repo,
1476 node1=parent,
1477 node1=parent,
1477 node2=n,
1478 node2=n,
1478 changes=changes,
1479 changes=changes,
1479 opts=diffopts,
1480 opts=diffopts,
1480 )
1481 )
1481 for chunk in chunks:
1482 for chunk in chunks:
1482 p.write(chunk)
1483 p.write(chunk)
1483 p.close()
1484 p.close()
1484 r = self.qrepo()
1485 r = self.qrepo()
1485 if r:
1486 if r:
1486 r[None].add([patchfn])
1487 r[None].add([patchfn])
1487 except: # re-raises
1488 except: # re-raises
1488 repo.rollback()
1489 repo.rollback()
1489 raise
1490 raise
1490 except Exception:
1491 except Exception:
1491 patchpath = self.join(patchfn)
1492 patchpath = self.join(patchfn)
1492 try:
1493 try:
1493 os.unlink(patchpath)
1494 os.unlink(patchpath)
1494 except OSError:
1495 except OSError:
1495 self.ui.warn(_(b'error unlinking %s\n') % patchpath)
1496 self.ui.warn(_(b'error unlinking %s\n') % patchpath)
1496 raise
1497 raise
1497 self.removeundo(repo)
1498 self.removeundo(repo)
1498
1499
1499 def isapplied(self, patch):
1500 def isapplied(self, patch):
1500 """returns (index, rev, patch)"""
1501 """returns (index, rev, patch)"""
1501 for i, a in enumerate(self.applied):
1502 for i, a in enumerate(self.applied):
1502 if a.name == patch:
1503 if a.name == patch:
1503 return (i, a.node, a.name)
1504 return (i, a.node, a.name)
1504 return None
1505 return None
1505
1506
1506 # if the exact patch name does not exist, we try a few
1507 # if the exact patch name does not exist, we try a few
1507 # variations. If strict is passed, we try only #1
1508 # variations. If strict is passed, we try only #1
1508 #
1509 #
1509 # 1) a number (as string) to indicate an offset in the series file
1510 # 1) a number (as string) to indicate an offset in the series file
1510 # 2) a unique substring of the patch name was given
1511 # 2) a unique substring of the patch name was given
1511 # 3) patchname[-+]num to indicate an offset in the series file
1512 # 3) patchname[-+]num to indicate an offset in the series file
1512 def lookup(self, patch, strict=False):
1513 def lookup(self, patch, strict=False):
1513 def partialname(s):
1514 def partialname(s):
1514 if s in self.series:
1515 if s in self.series:
1515 return s
1516 return s
1516 matches = [x for x in self.series if s in x]
1517 matches = [x for x in self.series if s in x]
1517 if len(matches) > 1:
1518 if len(matches) > 1:
1518 self.ui.warn(_(b'patch name "%s" is ambiguous:\n') % s)
1519 self.ui.warn(_(b'patch name "%s" is ambiguous:\n') % s)
1519 for m in matches:
1520 for m in matches:
1520 self.ui.warn(b' %s\n' % m)
1521 self.ui.warn(b' %s\n' % m)
1521 return None
1522 return None
1522 if matches:
1523 if matches:
1523 return matches[0]
1524 return matches[0]
1524 if self.series and self.applied:
1525 if self.series and self.applied:
1525 if s == b'qtip':
1526 if s == b'qtip':
1526 return self.series[self.seriesend(True) - 1]
1527 return self.series[self.seriesend(True) - 1]
1527 if s == b'qbase':
1528 if s == b'qbase':
1528 return self.series[0]
1529 return self.series[0]
1529 return None
1530 return None
1530
1531
1531 if patch in self.series:
1532 if patch in self.series:
1532 return patch
1533 return patch
1533
1534
1534 if not os.path.isfile(self.join(patch)):
1535 if not os.path.isfile(self.join(patch)):
1535 try:
1536 try:
1536 sno = int(patch)
1537 sno = int(patch)
1537 except (ValueError, OverflowError):
1538 except (ValueError, OverflowError):
1538 pass
1539 pass
1539 else:
1540 else:
1540 if -len(self.series) <= sno < len(self.series):
1541 if -len(self.series) <= sno < len(self.series):
1541 return self.series[sno]
1542 return self.series[sno]
1542
1543
1543 if not strict:
1544 if not strict:
1544 res = partialname(patch)
1545 res = partialname(patch)
1545 if res:
1546 if res:
1546 return res
1547 return res
1547 minus = patch.rfind(b'-')
1548 minus = patch.rfind(b'-')
1548 if minus >= 0:
1549 if minus >= 0:
1549 res = partialname(patch[:minus])
1550 res = partialname(patch[:minus])
1550 if res:
1551 if res:
1551 i = self.series.index(res)
1552 i = self.series.index(res)
1552 try:
1553 try:
1553 off = int(patch[minus + 1 :] or 1)
1554 off = int(patch[minus + 1 :] or 1)
1554 except (ValueError, OverflowError):
1555 except (ValueError, OverflowError):
1555 pass
1556 pass
1556 else:
1557 else:
1557 if i - off >= 0:
1558 if i - off >= 0:
1558 return self.series[i - off]
1559 return self.series[i - off]
1559 plus = patch.rfind(b'+')
1560 plus = patch.rfind(b'+')
1560 if plus >= 0:
1561 if plus >= 0:
1561 res = partialname(patch[:plus])
1562 res = partialname(patch[:plus])
1562 if res:
1563 if res:
1563 i = self.series.index(res)
1564 i = self.series.index(res)
1564 try:
1565 try:
1565 off = int(patch[plus + 1 :] or 1)
1566 off = int(patch[plus + 1 :] or 1)
1566 except (ValueError, OverflowError):
1567 except (ValueError, OverflowError):
1567 pass
1568 pass
1568 else:
1569 else:
1569 if i + off < len(self.series):
1570 if i + off < len(self.series):
1570 return self.series[i + off]
1571 return self.series[i + off]
1571 raise error.Abort(_(b"patch %s not in series") % patch)
1572 raise error.Abort(_(b"patch %s not in series") % patch)
1572
1573
1573 def push(
1574 def push(
1574 self,
1575 self,
1575 repo,
1576 repo,
1576 patch=None,
1577 patch=None,
1577 force=False,
1578 force=False,
1578 list=False,
1579 list=False,
1579 mergeq=None,
1580 mergeq=None,
1580 all=False,
1581 all=False,
1581 move=False,
1582 move=False,
1582 exact=False,
1583 exact=False,
1583 nobackup=False,
1584 nobackup=False,
1584 keepchanges=False,
1585 keepchanges=False,
1585 ):
1586 ):
1586 self.checkkeepchanges(keepchanges, force)
1587 self.checkkeepchanges(keepchanges, force)
1587 diffopts = self.diffopts()
1588 diffopts = self.diffopts()
1588 with repo.wlock():
1589 with repo.wlock():
1589 heads = []
1590 heads = []
1590 for hs in repo.branchmap().iterheads():
1591 for hs in repo.branchmap().iterheads():
1591 heads.extend(hs)
1592 heads.extend(hs)
1592 if not heads:
1593 if not heads:
1593 heads = [nullid]
1594 heads = [nullid]
1594 if repo.dirstate.p1() not in heads and not exact:
1595 if repo.dirstate.p1() not in heads and not exact:
1595 self.ui.status(_(b"(working directory not at a head)\n"))
1596 self.ui.status(_(b"(working directory not at a head)\n"))
1596
1597
1597 if not self.series:
1598 if not self.series:
1598 self.ui.warn(_(b'no patches in series\n'))
1599 self.ui.warn(_(b'no patches in series\n'))
1599 return 0
1600 return 0
1600
1601
1601 # Suppose our series file is: A B C and the current 'top'
1602 # Suppose our series file is: A B C and the current 'top'
1602 # patch is B. qpush C should be performed (moving forward)
1603 # patch is B. qpush C should be performed (moving forward)
1603 # qpush B is a NOP (no change) qpush A is an error (can't
1604 # qpush B is a NOP (no change) qpush A is an error (can't
1604 # go backwards with qpush)
1605 # go backwards with qpush)
1605 if patch:
1606 if patch:
1606 patch = self.lookup(patch)
1607 patch = self.lookup(patch)
1607 info = self.isapplied(patch)
1608 info = self.isapplied(patch)
1608 if info and info[0] >= len(self.applied) - 1:
1609 if info and info[0] >= len(self.applied) - 1:
1609 self.ui.warn(
1610 self.ui.warn(
1610 _(b'qpush: %s is already at the top\n') % patch
1611 _(b'qpush: %s is already at the top\n') % patch
1611 )
1612 )
1612 return 0
1613 return 0
1613
1614
1614 pushable, reason = self.pushable(patch)
1615 pushable, reason = self.pushable(patch)
1615 if pushable:
1616 if pushable:
1616 if self.series.index(patch) < self.seriesend():
1617 if self.series.index(patch) < self.seriesend():
1617 raise error.Abort(
1618 raise error.Abort(
1618 _(b"cannot push to a previous patch: %s") % patch
1619 _(b"cannot push to a previous patch: %s") % patch
1619 )
1620 )
1620 else:
1621 else:
1621 if reason:
1622 if reason:
1622 reason = _(b'guarded by %s') % reason
1623 reason = _(b'guarded by %s') % reason
1623 else:
1624 else:
1624 reason = _(b'no matching guards')
1625 reason = _(b'no matching guards')
1625 self.ui.warn(
1626 self.ui.warn(
1626 _(b"cannot push '%s' - %s\n") % (patch, reason)
1627 _(b"cannot push '%s' - %s\n") % (patch, reason)
1627 )
1628 )
1628 return 1
1629 return 1
1629 elif all:
1630 elif all:
1630 patch = self.series[-1]
1631 patch = self.series[-1]
1631 if self.isapplied(patch):
1632 if self.isapplied(patch):
1632 self.ui.warn(_(b'all patches are currently applied\n'))
1633 self.ui.warn(_(b'all patches are currently applied\n'))
1633 return 0
1634 return 0
1634
1635
1635 # Following the above example, starting at 'top' of B:
1636 # Following the above example, starting at 'top' of B:
1636 # qpush should be performed (pushes C), but a subsequent
1637 # qpush should be performed (pushes C), but a subsequent
1637 # qpush without an argument is an error (nothing to
1638 # qpush without an argument is an error (nothing to
1638 # apply). This allows a loop of "...while hg qpush..." to
1639 # apply). This allows a loop of "...while hg qpush..." to
1639 # work as it detects an error when done
1640 # work as it detects an error when done
1640 start = self.seriesend()
1641 start = self.seriesend()
1641 if start == len(self.series):
1642 if start == len(self.series):
1642 self.ui.warn(_(b'patch series already fully applied\n'))
1643 self.ui.warn(_(b'patch series already fully applied\n'))
1643 return 1
1644 return 1
1644 if not force and not keepchanges:
1645 if not force and not keepchanges:
1645 self.checklocalchanges(repo, refresh=self.applied)
1646 self.checklocalchanges(repo, refresh=self.applied)
1646
1647
1647 if exact:
1648 if exact:
1648 if keepchanges:
1649 if keepchanges:
1649 raise error.Abort(
1650 raise error.Abort(
1650 _(b"cannot use --exact and --keep-changes together")
1651 _(b"cannot use --exact and --keep-changes together")
1651 )
1652 )
1652 if move:
1653 if move:
1653 raise error.Abort(
1654 raise error.Abort(
1654 _(b'cannot use --exact and --move together')
1655 _(b'cannot use --exact and --move together')
1655 )
1656 )
1656 if self.applied:
1657 if self.applied:
1657 raise error.Abort(
1658 raise error.Abort(
1658 _(b'cannot push --exact with applied patches')
1659 _(b'cannot push --exact with applied patches')
1659 )
1660 )
1660 root = self.series[start]
1661 root = self.series[start]
1661 target = patchheader(self.join(root), self.plainmode).parent
1662 target = patchheader(self.join(root), self.plainmode).parent
1662 if not target:
1663 if not target:
1663 raise error.Abort(
1664 raise error.Abort(
1664 _(b"%s does not have a parent recorded") % root
1665 _(b"%s does not have a parent recorded") % root
1665 )
1666 )
1666 if not repo[target] == repo[b'.']:
1667 if not repo[target] == repo[b'.']:
1667 hg.update(repo, target)
1668 hg.update(repo, target)
1668
1669
1669 if move:
1670 if move:
1670 if not patch:
1671 if not patch:
1671 raise error.Abort(_(b"please specify the patch to move"))
1672 raise error.Abort(_(b"please specify the patch to move"))
1672 for fullstart, rpn in enumerate(self.fullseries):
1673 for fullstart, rpn in enumerate(self.fullseries):
1673 # strip markers for patch guards
1674 # strip markers for patch guards
1674 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1675 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1675 break
1676 break
1676 for i, rpn in enumerate(self.fullseries[fullstart:]):
1677 for i, rpn in enumerate(self.fullseries[fullstart:]):
1677 # strip markers for patch guards
1678 # strip markers for patch guards
1678 if self.guard_re.split(rpn, 1)[0] == patch:
1679 if self.guard_re.split(rpn, 1)[0] == patch:
1679 break
1680 break
1680 index = fullstart + i
1681 index = fullstart + i
1681 assert index < len(self.fullseries)
1682 assert index < len(self.fullseries)
1682 fullpatch = self.fullseries[index]
1683 fullpatch = self.fullseries[index]
1683 del self.fullseries[index]
1684 del self.fullseries[index]
1684 self.fullseries.insert(fullstart, fullpatch)
1685 self.fullseries.insert(fullstart, fullpatch)
1685 self.parseseries()
1686 self.parseseries()
1686 self.seriesdirty = True
1687 self.seriesdirty = True
1687
1688
1688 self.applieddirty = True
1689 self.applieddirty = True
1689 if start > 0:
1690 if start > 0:
1690 self.checktoppatch(repo)
1691 self.checktoppatch(repo)
1691 if not patch:
1692 if not patch:
1692 patch = self.series[start]
1693 patch = self.series[start]
1693 end = start + 1
1694 end = start + 1
1694 else:
1695 else:
1695 end = self.series.index(patch, start) + 1
1696 end = self.series.index(patch, start) + 1
1696
1697
1697 tobackup = set()
1698 tobackup = set()
1698 if (not nobackup and force) or keepchanges:
1699 if (not nobackup and force) or keepchanges:
1699 status = self.checklocalchanges(repo, force=True)
1700 status = self.checklocalchanges(repo, force=True)
1700 if keepchanges:
1701 if keepchanges:
1701 tobackup.update(
1702 tobackup.update(
1702 status.modified
1703 status.modified
1703 + status.added
1704 + status.added
1704 + status.removed
1705 + status.removed
1705 + status.deleted
1706 + status.deleted
1706 )
1707 )
1707 else:
1708 else:
1708 tobackup.update(status.modified + status.added)
1709 tobackup.update(status.modified + status.added)
1709
1710
1710 s = self.series[start:end]
1711 s = self.series[start:end]
1711 all_files = set()
1712 all_files = set()
1712 try:
1713 try:
1713 if mergeq:
1714 if mergeq:
1714 ret = self.mergepatch(repo, mergeq, s, diffopts)
1715 ret = self.mergepatch(repo, mergeq, s, diffopts)
1715 else:
1716 else:
1716 ret = self.apply(
1717 ret = self.apply(
1717 repo,
1718 repo,
1718 s,
1719 s,
1719 list,
1720 list,
1720 all_files=all_files,
1721 all_files=all_files,
1721 tobackup=tobackup,
1722 tobackup=tobackup,
1722 keepchanges=keepchanges,
1723 keepchanges=keepchanges,
1723 )
1724 )
1724 except AbortNoCleanup:
1725 except AbortNoCleanup:
1725 raise
1726 raise
1726 except: # re-raises
1727 except: # re-raises
1727 self.ui.warn(_(b'cleaning up working directory...\n'))
1728 self.ui.warn(_(b'cleaning up working directory...\n'))
1728 cmdutil.revert(
1729 cmdutil.revert(
1729 self.ui,
1730 self.ui,
1730 repo,
1731 repo,
1731 repo[b'.'],
1732 repo[b'.'],
1732 no_backup=True,
1733 no_backup=True,
1733 )
1734 )
1734 # only remove unknown files that we know we touched or
1735 # only remove unknown files that we know we touched or
1735 # created while patching
1736 # created while patching
1736 for f in all_files:
1737 for f in all_files:
1737 if f not in repo.dirstate:
1738 if f not in repo.dirstate:
1738 repo.wvfs.unlinkpath(f, ignoremissing=True)
1739 repo.wvfs.unlinkpath(f, ignoremissing=True)
1739 self.ui.warn(_(b'done\n'))
1740 self.ui.warn(_(b'done\n'))
1740 raise
1741 raise
1741
1742
1742 if not self.applied:
1743 if not self.applied:
1743 return ret[0]
1744 return ret[0]
1744 top = self.applied[-1].name
1745 top = self.applied[-1].name
1745 if ret[0] and ret[0] > 1:
1746 if ret[0] and ret[0] > 1:
1746 msg = _(b"errors during apply, please fix and qrefresh %s\n")
1747 msg = _(b"errors during apply, please fix and qrefresh %s\n")
1747 self.ui.write(msg % top)
1748 self.ui.write(msg % top)
1748 else:
1749 else:
1749 self.ui.write(_(b"now at: %s\n") % top)
1750 self.ui.write(_(b"now at: %s\n") % top)
1750 return ret[0]
1751 return ret[0]
1751
1752
1752 def pop(
1753 def pop(
1753 self,
1754 self,
1754 repo,
1755 repo,
1755 patch=None,
1756 patch=None,
1756 force=False,
1757 force=False,
1757 update=True,
1758 update=True,
1758 all=False,
1759 all=False,
1759 nobackup=False,
1760 nobackup=False,
1760 keepchanges=False,
1761 keepchanges=False,
1761 ):
1762 ):
1762 self.checkkeepchanges(keepchanges, force)
1763 self.checkkeepchanges(keepchanges, force)
1763 with repo.wlock():
1764 with repo.wlock():
1764 if patch:
1765 if patch:
1765 # index, rev, patch
1766 # index, rev, patch
1766 info = self.isapplied(patch)
1767 info = self.isapplied(patch)
1767 if not info:
1768 if not info:
1768 patch = self.lookup(patch)
1769 patch = self.lookup(patch)
1769 info = self.isapplied(patch)
1770 info = self.isapplied(patch)
1770 if not info:
1771 if not info:
1771 raise error.Abort(_(b"patch %s is not applied") % patch)
1772 raise error.Abort(_(b"patch %s is not applied") % patch)
1772
1773
1773 if not self.applied:
1774 if not self.applied:
1774 # Allow qpop -a to work repeatedly,
1775 # Allow qpop -a to work repeatedly,
1775 # but not qpop without an argument
1776 # but not qpop without an argument
1776 self.ui.warn(_(b"no patches applied\n"))
1777 self.ui.warn(_(b"no patches applied\n"))
1777 return not all
1778 return not all
1778
1779
1779 if all:
1780 if all:
1780 start = 0
1781 start = 0
1781 elif patch:
1782 elif patch:
1782 start = info[0] + 1
1783 start = info[0] + 1
1783 else:
1784 else:
1784 start = len(self.applied) - 1
1785 start = len(self.applied) - 1
1785
1786
1786 if start >= len(self.applied):
1787 if start >= len(self.applied):
1787 self.ui.warn(_(b"qpop: %s is already at the top\n") % patch)
1788 self.ui.warn(_(b"qpop: %s is already at the top\n") % patch)
1788 return
1789 return
1789
1790
1790 if not update:
1791 if not update:
1791 parents = repo.dirstate.parents()
1792 parents = repo.dirstate.parents()
1792 rr = [x.node for x in self.applied]
1793 rr = [x.node for x in self.applied]
1793 for p in parents:
1794 for p in parents:
1794 if p in rr:
1795 if p in rr:
1795 self.ui.warn(_(b"qpop: forcing dirstate update\n"))
1796 self.ui.warn(_(b"qpop: forcing dirstate update\n"))
1796 update = True
1797 update = True
1797 else:
1798 else:
1798 parents = [p.node() for p in repo[None].parents()]
1799 parents = [p.node() for p in repo[None].parents()]
1799 update = any(
1800 update = any(
1800 entry.node in parents for entry in self.applied[start:]
1801 entry.node in parents for entry in self.applied[start:]
1801 )
1802 )
1802
1803
1803 tobackup = set()
1804 tobackup = set()
1804 if update:
1805 if update:
1805 s = self.checklocalchanges(repo, force=force or keepchanges)
1806 s = self.checklocalchanges(repo, force=force or keepchanges)
1806 if force:
1807 if force:
1807 if not nobackup:
1808 if not nobackup:
1808 tobackup.update(s.modified + s.added)
1809 tobackup.update(s.modified + s.added)
1809 elif keepchanges:
1810 elif keepchanges:
1810 tobackup.update(
1811 tobackup.update(
1811 s.modified + s.added + s.removed + s.deleted
1812 s.modified + s.added + s.removed + s.deleted
1812 )
1813 )
1813
1814
1814 self.applieddirty = True
1815 self.applieddirty = True
1815 end = len(self.applied)
1816 end = len(self.applied)
1816 rev = self.applied[start].node
1817 rev = self.applied[start].node
1817
1818
1818 try:
1819 try:
1819 heads = repo.changelog.heads(rev)
1820 heads = repo.changelog.heads(rev)
1820 except error.LookupError:
1821 except error.LookupError:
1821 node = short(rev)
1822 node = short(rev)
1822 raise error.Abort(_(b'trying to pop unknown node %s') % node)
1823 raise error.Abort(_(b'trying to pop unknown node %s') % node)
1823
1824
1824 if heads != [self.applied[-1].node]:
1825 if heads != [self.applied[-1].node]:
1825 raise error.Abort(
1826 raise error.Abort(
1826 _(
1827 _(
1827 b"popping would remove a revision not "
1828 b"popping would remove a revision not "
1828 b"managed by this patch queue"
1829 b"managed by this patch queue"
1829 )
1830 )
1830 )
1831 )
1831 if not repo[self.applied[-1].node].mutable():
1832 if not repo[self.applied[-1].node].mutable():
1832 raise error.Abort(
1833 raise error.Abort(
1833 _(b"popping would remove a public revision"),
1834 _(b"popping would remove a public revision"),
1834 hint=_(b"see 'hg help phases' for details"),
1835 hint=_(b"see 'hg help phases' for details"),
1835 )
1836 )
1836
1837
1837 # we know there are no local changes, so we can make a simplified
1838 # we know there are no local changes, so we can make a simplified
1838 # form of hg.update.
1839 # form of hg.update.
1839 if update:
1840 if update:
1840 qp = self.qparents(repo, rev)
1841 qp = self.qparents(repo, rev)
1841 ctx = repo[qp]
1842 ctx = repo[qp]
1842 st = repo.status(qp, b'.')
1843 st = repo.status(qp, b'.')
1843 m, a, r, d = st.modified, st.added, st.removed, st.deleted
1844 m, a, r, d = st.modified, st.added, st.removed, st.deleted
1844 if d:
1845 if d:
1845 raise error.Abort(_(b"deletions found between repo revs"))
1846 raise error.Abort(_(b"deletions found between repo revs"))
1846
1847
1847 tobackup = set(a + m + r) & tobackup
1848 tobackup = set(a + m + r) & tobackup
1848 if keepchanges and tobackup:
1849 if keepchanges and tobackup:
1849 raise error.Abort(_(b"local changes found, qrefresh first"))
1850 raise error.Abort(_(b"local changes found, qrefresh first"))
1850 self.backup(repo, tobackup)
1851 self.backup(repo, tobackup)
1851 with repo.dirstate.parentchange():
1852 with repo.dirstate.parentchange():
1852 for f in a:
1853 for f in a:
1853 repo.wvfs.unlinkpath(f, ignoremissing=True)
1854 repo.wvfs.unlinkpath(f, ignoremissing=True)
1854 repo.dirstate.drop(f)
1855 repo.dirstate.drop(f)
1855 for f in m + r:
1856 for f in m + r:
1856 fctx = ctx[f]
1857 fctx = ctx[f]
1857 repo.wwrite(f, fctx.data(), fctx.flags())
1858 repo.wwrite(f, fctx.data(), fctx.flags())
1858 repo.dirstate.normal(f)
1859 repo.dirstate.normal(f)
1859 repo.setparents(qp, nullid)
1860 repo.setparents(qp, nullid)
1860 for patch in reversed(self.applied[start:end]):
1861 for patch in reversed(self.applied[start:end]):
1861 self.ui.status(_(b"popping %s\n") % patch.name)
1862 self.ui.status(_(b"popping %s\n") % patch.name)
1862 del self.applied[start:end]
1863 del self.applied[start:end]
1863 strip(self.ui, repo, [rev], update=False, backup=False)
1864 strip(self.ui, repo, [rev], update=False, backup=False)
1864 for s, state in repo[b'.'].substate.items():
1865 for s, state in repo[b'.'].substate.items():
1865 repo[b'.'].sub(s).get(state)
1866 repo[b'.'].sub(s).get(state)
1866 if self.applied:
1867 if self.applied:
1867 self.ui.write(_(b"now at: %s\n") % self.applied[-1].name)
1868 self.ui.write(_(b"now at: %s\n") % self.applied[-1].name)
1868 else:
1869 else:
1869 self.ui.write(_(b"patch queue now empty\n"))
1870 self.ui.write(_(b"patch queue now empty\n"))
1870
1871
1871 def diff(self, repo, pats, opts):
1872 def diff(self, repo, pats, opts):
1872 top, patch = self.checktoppatch(repo)
1873 top, patch = self.checktoppatch(repo)
1873 if not top:
1874 if not top:
1874 self.ui.write(_(b"no patches applied\n"))
1875 self.ui.write(_(b"no patches applied\n"))
1875 return
1876 return
1876 qp = self.qparents(repo, top)
1877 qp = self.qparents(repo, top)
1877 if opts.get(b'reverse'):
1878 if opts.get(b'reverse'):
1878 node1, node2 = None, qp
1879 node1, node2 = None, qp
1879 else:
1880 else:
1880 node1, node2 = qp, None
1881 node1, node2 = qp, None
1881 diffopts = self.diffopts(opts, patch)
1882 diffopts = self.diffopts(opts, patch)
1882 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1883 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1883
1884
1884 def refresh(self, repo, pats=None, **opts):
1885 def refresh(self, repo, pats=None, **opts):
1885 opts = pycompat.byteskwargs(opts)
1886 opts = pycompat.byteskwargs(opts)
1886 if not self.applied:
1887 if not self.applied:
1887 self.ui.write(_(b"no patches applied\n"))
1888 self.ui.write(_(b"no patches applied\n"))
1888 return 1
1889 return 1
1889 msg = opts.get(b'msg', b'').rstrip()
1890 msg = opts.get(b'msg', b'').rstrip()
1890 edit = opts.get(b'edit')
1891 edit = opts.get(b'edit')
1891 editform = opts.get(b'editform', b'mq.qrefresh')
1892 editform = opts.get(b'editform', b'mq.qrefresh')
1892 newuser = opts.get(b'user')
1893 newuser = opts.get(b'user')
1893 newdate = opts.get(b'date')
1894 newdate = opts.get(b'date')
1894 if newdate:
1895 if newdate:
1895 newdate = b'%d %d' % dateutil.parsedate(newdate)
1896 newdate = b'%d %d' % dateutil.parsedate(newdate)
1896 wlock = repo.wlock()
1897 wlock = repo.wlock()
1897
1898
1898 try:
1899 try:
1899 self.checktoppatch(repo)
1900 self.checktoppatch(repo)
1900 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1901 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1901 if repo.changelog.heads(top) != [top]:
1902 if repo.changelog.heads(top) != [top]:
1902 raise error.Abort(
1903 raise error.Abort(
1903 _(b"cannot qrefresh a revision with children")
1904 _(b"cannot qrefresh a revision with children")
1904 )
1905 )
1905 if not repo[top].mutable():
1906 if not repo[top].mutable():
1906 raise error.Abort(
1907 raise error.Abort(
1907 _(b"cannot qrefresh public revision"),
1908 _(b"cannot qrefresh public revision"),
1908 hint=_(b"see 'hg help phases' for details"),
1909 hint=_(b"see 'hg help phases' for details"),
1909 )
1910 )
1910
1911
1911 cparents = repo.changelog.parents(top)
1912 cparents = repo.changelog.parents(top)
1912 patchparent = self.qparents(repo, top)
1913 patchparent = self.qparents(repo, top)
1913
1914
1914 inclsubs = checksubstate(repo, patchparent)
1915 inclsubs = checksubstate(repo, patchparent)
1915 if inclsubs:
1916 if inclsubs:
1916 substatestate = repo.dirstate[b'.hgsubstate']
1917 substatestate = repo.dirstate[b'.hgsubstate']
1917
1918
1918 ph = patchheader(self.join(patchfn), self.plainmode)
1919 ph = patchheader(self.join(patchfn), self.plainmode)
1919 diffopts = self.diffopts(
1920 diffopts = self.diffopts(
1920 {b'git': opts.get(b'git')}, patchfn, plain=True
1921 {b'git': opts.get(b'git')}, patchfn, plain=True
1921 )
1922 )
1922 if newuser:
1923 if newuser:
1923 ph.setuser(newuser)
1924 ph.setuser(newuser)
1924 if newdate:
1925 if newdate:
1925 ph.setdate(newdate)
1926 ph.setdate(newdate)
1926 ph.setparent(hex(patchparent))
1927 ph.setparent(hex(patchparent))
1927
1928
1928 # only commit new patch when write is complete
1929 # only commit new patch when write is complete
1929 patchf = self.opener(patchfn, b'w', atomictemp=True)
1930 patchf = self.opener(patchfn, b'w', atomictemp=True)
1930
1931
1931 # update the dirstate in place, strip off the qtip commit
1932 # update the dirstate in place, strip off the qtip commit
1932 # and then commit.
1933 # and then commit.
1933 #
1934 #
1934 # this should really read:
1935 # this should really read:
1935 # st = repo.status(top, patchparent)
1936 # st = repo.status(top, patchparent)
1936 # but we do it backwards to take advantage of manifest/changelog
1937 # but we do it backwards to take advantage of manifest/changelog
1937 # caching against the next repo.status call
1938 # caching against the next repo.status call
1938 st = repo.status(patchparent, top)
1939 st = repo.status(patchparent, top)
1939 mm, aa, dd = st.modified, st.added, st.removed
1940 mm, aa, dd = st.modified, st.added, st.removed
1940 ctx = repo[top]
1941 ctx = repo[top]
1941 aaa = aa[:]
1942 aaa = aa[:]
1942 match1 = scmutil.match(repo[None], pats, opts)
1943 match1 = scmutil.match(repo[None], pats, opts)
1943 # in short mode, we only diff the files included in the
1944 # in short mode, we only diff the files included in the
1944 # patch already plus specified files
1945 # patch already plus specified files
1945 if opts.get(b'short'):
1946 if opts.get(b'short'):
1946 # if amending a patch, we start with existing
1947 # if amending a patch, we start with existing
1947 # files plus specified files - unfiltered
1948 # files plus specified files - unfiltered
1948 match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
1949 match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
1949 # filter with include/exclude options
1950 # filter with include/exclude options
1950 match1 = scmutil.match(repo[None], opts=opts)
1951 match1 = scmutil.match(repo[None], opts=opts)
1951 else:
1952 else:
1952 match = scmutil.matchall(repo)
1953 match = scmutil.matchall(repo)
1953 stb = repo.status(match=match)
1954 stb = repo.status(match=match)
1954 m, a, r, d = stb.modified, stb.added, stb.removed, stb.deleted
1955 m, a, r, d = stb.modified, stb.added, stb.removed, stb.deleted
1955 mm = set(mm)
1956 mm = set(mm)
1956 aa = set(aa)
1957 aa = set(aa)
1957 dd = set(dd)
1958 dd = set(dd)
1958
1959
1959 # we might end up with files that were added between
1960 # we might end up with files that were added between
1960 # qtip and the dirstate parent, but then changed in the
1961 # qtip and the dirstate parent, but then changed in the
1961 # local dirstate. in this case, we want them to only
1962 # local dirstate. in this case, we want them to only
1962 # show up in the added section
1963 # show up in the added section
1963 for x in m:
1964 for x in m:
1964 if x not in aa:
1965 if x not in aa:
1965 mm.add(x)
1966 mm.add(x)
1966 # we might end up with files added by the local dirstate that
1967 # we might end up with files added by the local dirstate that
1967 # were deleted by the patch. In this case, they should only
1968 # were deleted by the patch. In this case, they should only
1968 # show up in the changed section.
1969 # show up in the changed section.
1969 for x in a:
1970 for x in a:
1970 if x in dd:
1971 if x in dd:
1971 dd.remove(x)
1972 dd.remove(x)
1972 mm.add(x)
1973 mm.add(x)
1973 else:
1974 else:
1974 aa.add(x)
1975 aa.add(x)
1975 # make sure any files deleted in the local dirstate
1976 # make sure any files deleted in the local dirstate
1976 # are not in the add or change column of the patch
1977 # are not in the add or change column of the patch
1977 forget = []
1978 forget = []
1978 for x in d + r:
1979 for x in d + r:
1979 if x in aa:
1980 if x in aa:
1980 aa.remove(x)
1981 aa.remove(x)
1981 forget.append(x)
1982 forget.append(x)
1982 continue
1983 continue
1983 else:
1984 else:
1984 mm.discard(x)
1985 mm.discard(x)
1985 dd.add(x)
1986 dd.add(x)
1986
1987
1987 m = list(mm)
1988 m = list(mm)
1988 r = list(dd)
1989 r = list(dd)
1989 a = list(aa)
1990 a = list(aa)
1990
1991
1991 # create 'match' that includes the files to be recommitted.
1992 # create 'match' that includes the files to be recommitted.
1992 # apply match1 via repo.status to ensure correct case handling.
1993 # apply match1 via repo.status to ensure correct case handling.
1993 st = repo.status(patchparent, match=match1)
1994 st = repo.status(patchparent, match=match1)
1994 cm, ca, cr, cd = st.modified, st.added, st.removed, st.deleted
1995 cm, ca, cr, cd = st.modified, st.added, st.removed, st.deleted
1995 allmatches = set(cm + ca + cr + cd)
1996 allmatches = set(cm + ca + cr + cd)
1996 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1997 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1997
1998
1998 files = set(inclsubs)
1999 files = set(inclsubs)
1999 for x in refreshchanges:
2000 for x in refreshchanges:
2000 files.update(x)
2001 files.update(x)
2001 match = scmutil.matchfiles(repo, files)
2002 match = scmutil.matchfiles(repo, files)
2002
2003
2003 bmlist = repo[top].bookmarks()
2004 bmlist = repo[top].bookmarks()
2004
2005
2005 dsguard = None
2006 dsguard = None
2006 try:
2007 try:
2007 dsguard = dirstateguard.dirstateguard(repo, b'mq.refresh')
2008 dsguard = dirstateguard.dirstateguard(repo, b'mq.refresh')
2008 if diffopts.git or diffopts.upgrade:
2009 if diffopts.git or diffopts.upgrade:
2009 copies = {}
2010 copies = {}
2010 for dst in a:
2011 for dst in a:
2011 src = repo.dirstate.copied(dst)
2012 src = repo.dirstate.copied(dst)
2012 # during qfold, the source file for copies may
2013 # during qfold, the source file for copies may
2013 # be removed. Treat this as a simple add.
2014 # be removed. Treat this as a simple add.
2014 if src is not None and src in repo.dirstate:
2015 if src is not None and src in repo.dirstate:
2015 copies.setdefault(src, []).append(dst)
2016 copies.setdefault(src, []).append(dst)
2016 repo.dirstate.add(dst)
2017 repo.dirstate.add(dst)
2017 # remember the copies between patchparent and qtip
2018 # remember the copies between patchparent and qtip
2018 for dst in aaa:
2019 for dst in aaa:
2019 src = ctx[dst].copysource()
2020 src = ctx[dst].copysource()
2020 if src:
2021 if src:
2021 copies.setdefault(src, []).extend(
2022 copies.setdefault(src, []).extend(
2022 copies.get(dst, [])
2023 copies.get(dst, [])
2023 )
2024 )
2024 if dst in a:
2025 if dst in a:
2025 copies[src].append(dst)
2026 copies[src].append(dst)
2026 # we can't copy a file created by the patch itself
2027 # we can't copy a file created by the patch itself
2027 if dst in copies:
2028 if dst in copies:
2028 del copies[dst]
2029 del copies[dst]
2029 for src, dsts in pycompat.iteritems(copies):
2030 for src, dsts in pycompat.iteritems(copies):
2030 for dst in dsts:
2031 for dst in dsts:
2031 repo.dirstate.copy(src, dst)
2032 repo.dirstate.copy(src, dst)
2032 else:
2033 else:
2033 for dst in a:
2034 for dst in a:
2034 repo.dirstate.add(dst)
2035 repo.dirstate.add(dst)
2035 # Drop useless copy information
2036 # Drop useless copy information
2036 for f in list(repo.dirstate.copies()):
2037 for f in list(repo.dirstate.copies()):
2037 repo.dirstate.copy(None, f)
2038 repo.dirstate.copy(None, f)
2038 for f in r:
2039 for f in r:
2039 repo.dirstate.remove(f)
2040 repo.dirstate.remove(f)
2040 # if the patch excludes a modified file, mark that
2041 # if the patch excludes a modified file, mark that
2041 # file with mtime=0 so status can see it.
2042 # file with mtime=0 so status can see it.
2042 mm = []
2043 mm = []
2043 for i in pycompat.xrange(len(m) - 1, -1, -1):
2044 for i in pycompat.xrange(len(m) - 1, -1, -1):
2044 if not match1(m[i]):
2045 if not match1(m[i]):
2045 mm.append(m[i])
2046 mm.append(m[i])
2046 del m[i]
2047 del m[i]
2047 for f in m:
2048 for f in m:
2048 repo.dirstate.normal(f)
2049 repo.dirstate.normal(f)
2049 for f in mm:
2050 for f in mm:
2050 repo.dirstate.normallookup(f)
2051 repo.dirstate.normallookup(f)
2051 for f in forget:
2052 for f in forget:
2052 repo.dirstate.drop(f)
2053 repo.dirstate.drop(f)
2053
2054
2054 user = ph.user or ctx.user()
2055 user = ph.user or ctx.user()
2055
2056
2056 oldphase = repo[top].phase()
2057 oldphase = repo[top].phase()
2057
2058
2058 # assumes strip can roll itself back if interrupted
2059 # assumes strip can roll itself back if interrupted
2059 repo.setparents(*cparents)
2060 repo.setparents(*cparents)
2060 self.applied.pop()
2061 self.applied.pop()
2061 self.applieddirty = True
2062 self.applieddirty = True
2062 strip(self.ui, repo, [top], update=False, backup=False)
2063 strip(self.ui, repo, [top], update=False, backup=False)
2063 dsguard.close()
2064 dsguard.close()
2064 finally:
2065 finally:
2065 release(dsguard)
2066 release(dsguard)
2066
2067
2067 try:
2068 try:
2068 # might be nice to attempt to roll back strip after this
2069 # might be nice to attempt to roll back strip after this
2069
2070
2070 defaultmsg = b"[mq]: %s" % patchfn
2071 defaultmsg = b"[mq]: %s" % patchfn
2071 editor = cmdutil.getcommiteditor(editform=editform)
2072 editor = cmdutil.getcommiteditor(editform=editform)
2072 if edit:
2073 if edit:
2073
2074
2074 def finishdesc(desc):
2075 def finishdesc(desc):
2075 if desc.rstrip():
2076 if desc.rstrip():
2076 ph.setmessage(desc)
2077 ph.setmessage(desc)
2077 return desc
2078 return desc
2078 return defaultmsg
2079 return defaultmsg
2079
2080
2080 # i18n: this message is shown in editor with "HG: " prefix
2081 # i18n: this message is shown in editor with "HG: " prefix
2081 extramsg = _(b'Leave message empty to use default message.')
2082 extramsg = _(b'Leave message empty to use default message.')
2082 editor = cmdutil.getcommiteditor(
2083 editor = cmdutil.getcommiteditor(
2083 finishdesc=finishdesc,
2084 finishdesc=finishdesc,
2084 extramsg=extramsg,
2085 extramsg=extramsg,
2085 editform=editform,
2086 editform=editform,
2086 )
2087 )
2087 message = msg or b"\n".join(ph.message)
2088 message = msg or b"\n".join(ph.message)
2088 elif not msg:
2089 elif not msg:
2089 if not ph.message:
2090 if not ph.message:
2090 message = defaultmsg
2091 message = defaultmsg
2091 else:
2092 else:
2092 message = b"\n".join(ph.message)
2093 message = b"\n".join(ph.message)
2093 else:
2094 else:
2094 message = msg
2095 message = msg
2095 ph.setmessage(msg)
2096 ph.setmessage(msg)
2096
2097
2097 # Ensure we create a new changeset in the same phase than
2098 # Ensure we create a new changeset in the same phase than
2098 # the old one.
2099 # the old one.
2099 lock = tr = None
2100 lock = tr = None
2100 try:
2101 try:
2101 lock = repo.lock()
2102 lock = repo.lock()
2102 tr = repo.transaction(b'mq')
2103 tr = repo.transaction(b'mq')
2103 n = newcommit(
2104 n = newcommit(
2104 repo,
2105 repo,
2105 oldphase,
2106 oldphase,
2106 message,
2107 message,
2107 user,
2108 user,
2108 ph.date,
2109 ph.date,
2109 match=match,
2110 match=match,
2110 force=True,
2111 force=True,
2111 editor=editor,
2112 editor=editor,
2112 )
2113 )
2113 # only write patch after a successful commit
2114 # only write patch after a successful commit
2114 c = [list(x) for x in refreshchanges]
2115 c = [list(x) for x in refreshchanges]
2115 if inclsubs:
2116 if inclsubs:
2116 self.putsubstate2changes(substatestate, c)
2117 self.putsubstate2changes(substatestate, c)
2117 chunks = patchmod.diff(
2118 chunks = patchmod.diff(
2118 repo, patchparent, changes=c, opts=diffopts
2119 repo, patchparent, changes=c, opts=diffopts
2119 )
2120 )
2120 comments = bytes(ph)
2121 comments = bytes(ph)
2121 if comments:
2122 if comments:
2122 patchf.write(comments)
2123 patchf.write(comments)
2123 for chunk in chunks:
2124 for chunk in chunks:
2124 patchf.write(chunk)
2125 patchf.write(chunk)
2125 patchf.close()
2126 patchf.close()
2126
2127
2127 marks = repo._bookmarks
2128 marks = repo._bookmarks
2128 marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
2129 marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
2129 tr.close()
2130 tr.close()
2130
2131
2131 self.applied.append(statusentry(n, patchfn))
2132 self.applied.append(statusentry(n, patchfn))
2132 finally:
2133 finally:
2133 lockmod.release(tr, lock)
2134 lockmod.release(tr, lock)
2134 except: # re-raises
2135 except: # re-raises
2135 ctx = repo[cparents[0]]
2136 ctx = repo[cparents[0]]
2136 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2137 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2137 self.savedirty()
2138 self.savedirty()
2138 self.ui.warn(
2139 self.ui.warn(
2139 _(
2140 _(
2140 b'qrefresh interrupted while patch was popped! '
2141 b'qrefresh interrupted while patch was popped! '
2141 b'(revert --all, qpush to recover)\n'
2142 b'(revert --all, qpush to recover)\n'
2142 )
2143 )
2143 )
2144 )
2144 raise
2145 raise
2145 finally:
2146 finally:
2146 wlock.release()
2147 wlock.release()
2147 self.removeundo(repo)
2148 self.removeundo(repo)
2148
2149
2149 def init(self, repo, create=False):
2150 def init(self, repo, create=False):
2150 if not create and os.path.isdir(self.path):
2151 if not create and os.path.isdir(self.path):
2151 raise error.Abort(_(b"patch queue directory already exists"))
2152 raise error.Abort(_(b"patch queue directory already exists"))
2152 try:
2153 try:
2153 os.mkdir(self.path)
2154 os.mkdir(self.path)
2154 except OSError as inst:
2155 except OSError as inst:
2155 if inst.errno != errno.EEXIST or not create:
2156 if inst.errno != errno.EEXIST or not create:
2156 raise
2157 raise
2157 if create:
2158 if create:
2158 return self.qrepo(create=True)
2159 return self.qrepo(create=True)
2159
2160
2160 def unapplied(self, repo, patch=None):
2161 def unapplied(self, repo, patch=None):
2161 if patch and patch not in self.series:
2162 if patch and patch not in self.series:
2162 raise error.Abort(_(b"patch %s is not in series file") % patch)
2163 raise error.Abort(_(b"patch %s is not in series file") % patch)
2163 if not patch:
2164 if not patch:
2164 start = self.seriesend()
2165 start = self.seriesend()
2165 else:
2166 else:
2166 start = self.series.index(patch) + 1
2167 start = self.series.index(patch) + 1
2167 unapplied = []
2168 unapplied = []
2168 for i in pycompat.xrange(start, len(self.series)):
2169 for i in pycompat.xrange(start, len(self.series)):
2169 pushable, reason = self.pushable(i)
2170 pushable, reason = self.pushable(i)
2170 if pushable:
2171 if pushable:
2171 unapplied.append((i, self.series[i]))
2172 unapplied.append((i, self.series[i]))
2172 self.explainpushable(i)
2173 self.explainpushable(i)
2173 return unapplied
2174 return unapplied
2174
2175
2175 def qseries(
2176 def qseries(
2176 self,
2177 self,
2177 repo,
2178 repo,
2178 missing=None,
2179 missing=None,
2179 start=0,
2180 start=0,
2180 length=None,
2181 length=None,
2181 status=None,
2182 status=None,
2182 summary=False,
2183 summary=False,
2183 ):
2184 ):
2184 def displayname(pfx, patchname, state):
2185 def displayname(pfx, patchname, state):
2185 if pfx:
2186 if pfx:
2186 self.ui.write(pfx)
2187 self.ui.write(pfx)
2187 if summary:
2188 if summary:
2188 ph = patchheader(self.join(patchname), self.plainmode)
2189 ph = patchheader(self.join(patchname), self.plainmode)
2189 if ph.message:
2190 if ph.message:
2190 msg = ph.message[0]
2191 msg = ph.message[0]
2191 else:
2192 else:
2192 msg = b''
2193 msg = b''
2193
2194
2194 if self.ui.formatted():
2195 if self.ui.formatted():
2195 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
2196 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
2196 if width > 0:
2197 if width > 0:
2197 msg = stringutil.ellipsis(msg, width)
2198 msg = stringutil.ellipsis(msg, width)
2198 else:
2199 else:
2199 msg = b''
2200 msg = b''
2200 self.ui.write(patchname, label=b'qseries.' + state)
2201 self.ui.write(patchname, label=b'qseries.' + state)
2201 self.ui.write(b': ')
2202 self.ui.write(b': ')
2202 self.ui.write(msg, label=b'qseries.message.' + state)
2203 self.ui.write(msg, label=b'qseries.message.' + state)
2203 else:
2204 else:
2204 self.ui.write(patchname, label=b'qseries.' + state)
2205 self.ui.write(patchname, label=b'qseries.' + state)
2205 self.ui.write(b'\n')
2206 self.ui.write(b'\n')
2206
2207
2207 applied = {p.name for p in self.applied}
2208 applied = {p.name for p in self.applied}
2208 if length is None:
2209 if length is None:
2209 length = len(self.series) - start
2210 length = len(self.series) - start
2210 if not missing:
2211 if not missing:
2211 if self.ui.verbose:
2212 if self.ui.verbose:
2212 idxwidth = len(b"%d" % (start + length - 1))
2213 idxwidth = len(b"%d" % (start + length - 1))
2213 for i in pycompat.xrange(start, start + length):
2214 for i in pycompat.xrange(start, start + length):
2214 patch = self.series[i]
2215 patch = self.series[i]
2215 if patch in applied:
2216 if patch in applied:
2216 char, state = b'A', b'applied'
2217 char, state = b'A', b'applied'
2217 elif self.pushable(i)[0]:
2218 elif self.pushable(i)[0]:
2218 char, state = b'U', b'unapplied'
2219 char, state = b'U', b'unapplied'
2219 else:
2220 else:
2220 char, state = b'G', b'guarded'
2221 char, state = b'G', b'guarded'
2221 pfx = b''
2222 pfx = b''
2222 if self.ui.verbose:
2223 if self.ui.verbose:
2223 pfx = b'%*d %s ' % (idxwidth, i, char)
2224 pfx = b'%*d %s ' % (idxwidth, i, char)
2224 elif status and status != char:
2225 elif status and status != char:
2225 continue
2226 continue
2226 displayname(pfx, patch, state)
2227 displayname(pfx, patch, state)
2227 else:
2228 else:
2228 msng_list = []
2229 msng_list = []
2229 for root, dirs, files in os.walk(self.path):
2230 for root, dirs, files in os.walk(self.path):
2230 d = root[len(self.path) + 1 :]
2231 d = root[len(self.path) + 1 :]
2231 for f in files:
2232 for f in files:
2232 fl = os.path.join(d, f)
2233 fl = os.path.join(d, f)
2233 if (
2234 if (
2234 fl not in self.series
2235 fl not in self.series
2235 and fl
2236 and fl
2236 not in (
2237 not in (
2237 self.statuspath,
2238 self.statuspath,
2238 self.seriespath,
2239 self.seriespath,
2239 self.guardspath,
2240 self.guardspath,
2240 )
2241 )
2241 and not fl.startswith(b'.')
2242 and not fl.startswith(b'.')
2242 ):
2243 ):
2243 msng_list.append(fl)
2244 msng_list.append(fl)
2244 for x in sorted(msng_list):
2245 for x in sorted(msng_list):
2245 pfx = self.ui.verbose and b'D ' or b''
2246 pfx = self.ui.verbose and b'D ' or b''
2246 displayname(pfx, x, b'missing')
2247 displayname(pfx, x, b'missing')
2247
2248
2248 def issaveline(self, l):
2249 def issaveline(self, l):
2249 if l.name == b'.hg.patches.save.line':
2250 if l.name == b'.hg.patches.save.line':
2250 return True
2251 return True
2251
2252
2252 def qrepo(self, create=False):
2253 def qrepo(self, create=False):
2253 ui = self.baseui.copy()
2254 ui = self.baseui.copy()
2254 # copy back attributes set by ui.pager()
2255 # copy back attributes set by ui.pager()
2255 if self.ui.pageractive and not ui.pageractive:
2256 if self.ui.pageractive and not ui.pageractive:
2256 ui.pageractive = self.ui.pageractive
2257 ui.pageractive = self.ui.pageractive
2257 # internal config: ui.formatted
2258 # internal config: ui.formatted
2258 ui.setconfig(
2259 ui.setconfig(
2259 b'ui',
2260 b'ui',
2260 b'formatted',
2261 b'formatted',
2261 self.ui.config(b'ui', b'formatted'),
2262 self.ui.config(b'ui', b'formatted'),
2262 b'mqpager',
2263 b'mqpager',
2263 )
2264 )
2264 ui.setconfig(
2265 ui.setconfig(
2265 b'ui',
2266 b'ui',
2266 b'interactive',
2267 b'interactive',
2267 self.ui.config(b'ui', b'interactive'),
2268 self.ui.config(b'ui', b'interactive'),
2268 b'mqpager',
2269 b'mqpager',
2269 )
2270 )
2270 if create or os.path.isdir(self.join(b".hg")):
2271 if create or os.path.isdir(self.join(b".hg")):
2271 return hg.repository(ui, path=self.path, create=create)
2272 return hg.repository(ui, path=self.path, create=create)
2272
2273
2273 def restore(self, repo, rev, delete=None, qupdate=None):
2274 def restore(self, repo, rev, delete=None, qupdate=None):
2274 desc = repo[rev].description().strip()
2275 desc = repo[rev].description().strip()
2275 lines = desc.splitlines()
2276 lines = desc.splitlines()
2276 datastart = None
2277 datastart = None
2277 series = []
2278 series = []
2278 applied = []
2279 applied = []
2279 qpp = None
2280 qpp = None
2280 for i, line in enumerate(lines):
2281 for i, line in enumerate(lines):
2281 if line == b'Patch Data:':
2282 if line == b'Patch Data:':
2282 datastart = i + 1
2283 datastart = i + 1
2283 elif line.startswith(b'Dirstate:'):
2284 elif line.startswith(b'Dirstate:'):
2284 l = line.rstrip()
2285 l = line.rstrip()
2285 l = l[10:].split(b' ')
2286 l = l[10:].split(b' ')
2286 qpp = [bin(x) for x in l]
2287 qpp = [bin(x) for x in l]
2287 elif datastart is not None:
2288 elif datastart is not None:
2288 l = line.rstrip()
2289 l = line.rstrip()
2289 n, name = l.split(b':', 1)
2290 n, name = l.split(b':', 1)
2290 if n:
2291 if n:
2291 applied.append(statusentry(bin(n), name))
2292 applied.append(statusentry(bin(n), name))
2292 else:
2293 else:
2293 series.append(l)
2294 series.append(l)
2294 if datastart is None:
2295 if datastart is None:
2295 self.ui.warn(_(b"no saved patch data found\n"))
2296 self.ui.warn(_(b"no saved patch data found\n"))
2296 return 1
2297 return 1
2297 self.ui.warn(_(b"restoring status: %s\n") % lines[0])
2298 self.ui.warn(_(b"restoring status: %s\n") % lines[0])
2298 self.fullseries = series
2299 self.fullseries = series
2299 self.applied = applied
2300 self.applied = applied
2300 self.parseseries()
2301 self.parseseries()
2301 self.seriesdirty = True
2302 self.seriesdirty = True
2302 self.applieddirty = True
2303 self.applieddirty = True
2303 heads = repo.changelog.heads()
2304 heads = repo.changelog.heads()
2304 if delete:
2305 if delete:
2305 if rev not in heads:
2306 if rev not in heads:
2306 self.ui.warn(_(b"save entry has children, leaving it alone\n"))
2307 self.ui.warn(_(b"save entry has children, leaving it alone\n"))
2307 else:
2308 else:
2308 self.ui.warn(_(b"removing save entry %s\n") % short(rev))
2309 self.ui.warn(_(b"removing save entry %s\n") % short(rev))
2309 pp = repo.dirstate.parents()
2310 pp = repo.dirstate.parents()
2310 if rev in pp:
2311 if rev in pp:
2311 update = True
2312 update = True
2312 else:
2313 else:
2313 update = False
2314 update = False
2314 strip(self.ui, repo, [rev], update=update, backup=False)
2315 strip(self.ui, repo, [rev], update=update, backup=False)
2315 if qpp:
2316 if qpp:
2316 self.ui.warn(
2317 self.ui.warn(
2317 _(b"saved queue repository parents: %s %s\n")
2318 _(b"saved queue repository parents: %s %s\n")
2318 % (short(qpp[0]), short(qpp[1]))
2319 % (short(qpp[0]), short(qpp[1]))
2319 )
2320 )
2320 if qupdate:
2321 if qupdate:
2321 self.ui.status(_(b"updating queue directory\n"))
2322 self.ui.status(_(b"updating queue directory\n"))
2322 r = self.qrepo()
2323 r = self.qrepo()
2323 if not r:
2324 if not r:
2324 self.ui.warn(_(b"unable to load queue repository\n"))
2325 self.ui.warn(_(b"unable to load queue repository\n"))
2325 return 1
2326 return 1
2326 hg.clean(r, qpp[0])
2327 hg.clean(r, qpp[0])
2327
2328
2328 def save(self, repo, msg=None):
2329 def save(self, repo, msg=None):
2329 if not self.applied:
2330 if not self.applied:
2330 self.ui.warn(_(b"save: no patches applied, exiting\n"))
2331 self.ui.warn(_(b"save: no patches applied, exiting\n"))
2331 return 1
2332 return 1
2332 if self.issaveline(self.applied[-1]):
2333 if self.issaveline(self.applied[-1]):
2333 self.ui.warn(_(b"status is already saved\n"))
2334 self.ui.warn(_(b"status is already saved\n"))
2334 return 1
2335 return 1
2335
2336
2336 if not msg:
2337 if not msg:
2337 msg = _(b"hg patches saved state")
2338 msg = _(b"hg patches saved state")
2338 else:
2339 else:
2339 msg = b"hg patches: " + msg.rstrip(b'\r\n')
2340 msg = b"hg patches: " + msg.rstrip(b'\r\n')
2340 r = self.qrepo()
2341 r = self.qrepo()
2341 if r:
2342 if r:
2342 pp = r.dirstate.parents()
2343 pp = r.dirstate.parents()
2343 msg += b"\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2344 msg += b"\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
2344 msg += b"\n\nPatch Data:\n"
2345 msg += b"\n\nPatch Data:\n"
2345 msg += b''.join(b'%s\n' % x for x in self.applied)
2346 msg += b''.join(b'%s\n' % x for x in self.applied)
2346 msg += b''.join(b':%s\n' % x for x in self.fullseries)
2347 msg += b''.join(b':%s\n' % x for x in self.fullseries)
2347 n = repo.commit(msg, force=True)
2348 n = repo.commit(msg, force=True)
2348 if not n:
2349 if not n:
2349 self.ui.warn(_(b"repo commit failed\n"))
2350 self.ui.warn(_(b"repo commit failed\n"))
2350 return 1
2351 return 1
2351 self.applied.append(statusentry(n, b'.hg.patches.save.line'))
2352 self.applied.append(statusentry(n, b'.hg.patches.save.line'))
2352 self.applieddirty = True
2353 self.applieddirty = True
2353 self.removeundo(repo)
2354 self.removeundo(repo)
2354
2355
2355 def fullseriesend(self):
2356 def fullseriesend(self):
2356 if self.applied:
2357 if self.applied:
2357 p = self.applied[-1].name
2358 p = self.applied[-1].name
2358 end = self.findseries(p)
2359 end = self.findseries(p)
2359 if end is None:
2360 if end is None:
2360 return len(self.fullseries)
2361 return len(self.fullseries)
2361 return end + 1
2362 return end + 1
2362 return 0
2363 return 0
2363
2364
2364 def seriesend(self, all_patches=False):
2365 def seriesend(self, all_patches=False):
2365 """If all_patches is False, return the index of the next pushable patch
2366 """If all_patches is False, return the index of the next pushable patch
2366 in the series, or the series length. If all_patches is True, return the
2367 in the series, or the series length. If all_patches is True, return the
2367 index of the first patch past the last applied one.
2368 index of the first patch past the last applied one.
2368 """
2369 """
2369 end = 0
2370 end = 0
2370
2371
2371 def nextpatch(start):
2372 def nextpatch(start):
2372 if all_patches or start >= len(self.series):
2373 if all_patches or start >= len(self.series):
2373 return start
2374 return start
2374 for i in pycompat.xrange(start, len(self.series)):
2375 for i in pycompat.xrange(start, len(self.series)):
2375 p, reason = self.pushable(i)
2376 p, reason = self.pushable(i)
2376 if p:
2377 if p:
2377 return i
2378 return i
2378 self.explainpushable(i)
2379 self.explainpushable(i)
2379 return len(self.series)
2380 return len(self.series)
2380
2381
2381 if self.applied:
2382 if self.applied:
2382 p = self.applied[-1].name
2383 p = self.applied[-1].name
2383 try:
2384 try:
2384 end = self.series.index(p)
2385 end = self.series.index(p)
2385 except ValueError:
2386 except ValueError:
2386 return 0
2387 return 0
2387 return nextpatch(end + 1)
2388 return nextpatch(end + 1)
2388 return nextpatch(end)
2389 return nextpatch(end)
2389
2390
2390 def appliedname(self, index):
2391 def appliedname(self, index):
2391 pname = self.applied[index].name
2392 pname = self.applied[index].name
2392 if not self.ui.verbose:
2393 if not self.ui.verbose:
2393 p = pname
2394 p = pname
2394 else:
2395 else:
2395 p = (b"%d" % self.series.index(pname)) + b" " + pname
2396 p = (b"%d" % self.series.index(pname)) + b" " + pname
2396 return p
2397 return p
2397
2398
2398 def qimport(
2399 def qimport(
2399 self,
2400 self,
2400 repo,
2401 repo,
2401 files,
2402 files,
2402 patchname=None,
2403 patchname=None,
2403 rev=None,
2404 rev=None,
2404 existing=None,
2405 existing=None,
2405 force=None,
2406 force=None,
2406 git=False,
2407 git=False,
2407 ):
2408 ):
2408 def checkseries(patchname):
2409 def checkseries(patchname):
2409 if patchname in self.series:
2410 if patchname in self.series:
2410 raise error.Abort(
2411 raise error.Abort(
2411 _(b'patch %s is already in the series file') % patchname
2412 _(b'patch %s is already in the series file') % patchname
2412 )
2413 )
2413
2414
2414 if rev:
2415 if rev:
2415 if files:
2416 if files:
2416 raise error.Abort(
2417 raise error.Abort(
2417 _(b'option "-r" not valid when importing files')
2418 _(b'option "-r" not valid when importing files')
2418 )
2419 )
2419 rev = scmutil.revrange(repo, rev)
2420 rev = scmutil.revrange(repo, rev)
2420 rev.sort(reverse=True)
2421 rev.sort(reverse=True)
2421 elif not files:
2422 elif not files:
2422 raise error.Abort(_(b'no files or revisions specified'))
2423 raise error.Abort(_(b'no files or revisions specified'))
2423 if (len(files) > 1 or len(rev) > 1) and patchname:
2424 if (len(files) > 1 or len(rev) > 1) and patchname:
2424 raise error.Abort(
2425 raise error.Abort(
2425 _(b'option "-n" not valid when importing multiple patches')
2426 _(b'option "-n" not valid when importing multiple patches')
2426 )
2427 )
2427 imported = []
2428 imported = []
2428 if rev:
2429 if rev:
2429 # If mq patches are applied, we can only import revisions
2430 # If mq patches are applied, we can only import revisions
2430 # that form a linear path to qbase.
2431 # that form a linear path to qbase.
2431 # Otherwise, they should form a linear path to a head.
2432 # Otherwise, they should form a linear path to a head.
2432 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2433 heads = repo.changelog.heads(repo.changelog.node(rev.first()))
2433 if len(heads) > 1:
2434 if len(heads) > 1:
2434 raise error.Abort(
2435 raise error.Abort(
2435 _(b'revision %d is the root of more than one branch')
2436 _(b'revision %d is the root of more than one branch')
2436 % rev.last()
2437 % rev.last()
2437 )
2438 )
2438 if self.applied:
2439 if self.applied:
2439 base = repo.changelog.node(rev.first())
2440 base = repo.changelog.node(rev.first())
2440 if base in [n.node for n in self.applied]:
2441 if base in [n.node for n in self.applied]:
2441 raise error.Abort(
2442 raise error.Abort(
2442 _(b'revision %d is already managed') % rev.first()
2443 _(b'revision %d is already managed') % rev.first()
2443 )
2444 )
2444 if heads != [self.applied[-1].node]:
2445 if heads != [self.applied[-1].node]:
2445 raise error.Abort(
2446 raise error.Abort(
2446 _(b'revision %d is not the parent of the queue')
2447 _(b'revision %d is not the parent of the queue')
2447 % rev.first()
2448 % rev.first()
2448 )
2449 )
2449 base = repo.changelog.rev(self.applied[0].node)
2450 base = repo.changelog.rev(self.applied[0].node)
2450 lastparent = repo.changelog.parentrevs(base)[0]
2451 lastparent = repo.changelog.parentrevs(base)[0]
2451 else:
2452 else:
2452 if heads != [repo.changelog.node(rev.first())]:
2453 if heads != [repo.changelog.node(rev.first())]:
2453 raise error.Abort(
2454 raise error.Abort(
2454 _(b'revision %d has unmanaged children') % rev.first()
2455 _(b'revision %d has unmanaged children') % rev.first()
2455 )
2456 )
2456 lastparent = None
2457 lastparent = None
2457
2458
2458 diffopts = self.diffopts({b'git': git})
2459 diffopts = self.diffopts({b'git': git})
2459 with repo.transaction(b'qimport') as tr:
2460 with repo.transaction(b'qimport') as tr:
2460 for r in rev:
2461 for r in rev:
2461 if not repo[r].mutable():
2462 if not repo[r].mutable():
2462 raise error.Abort(
2463 raise error.Abort(
2463 _(b'revision %d is not mutable') % r,
2464 _(b'revision %d is not mutable') % r,
2464 hint=_(b"see 'hg help phases' " b'for details'),
2465 hint=_(b"see 'hg help phases' " b'for details'),
2465 )
2466 )
2466 p1, p2 = repo.changelog.parentrevs(r)
2467 p1, p2 = repo.changelog.parentrevs(r)
2467 n = repo.changelog.node(r)
2468 n = repo.changelog.node(r)
2468 if p2 != nullrev:
2469 if p2 != nullrev:
2469 raise error.Abort(
2470 raise error.Abort(
2470 _(b'cannot import merge revision %d') % r
2471 _(b'cannot import merge revision %d') % r
2471 )
2472 )
2472 if lastparent and lastparent != r:
2473 if lastparent and lastparent != r:
2473 raise error.Abort(
2474 raise error.Abort(
2474 _(b'revision %d is not the parent of %d')
2475 _(b'revision %d is not the parent of %d')
2475 % (r, lastparent)
2476 % (r, lastparent)
2476 )
2477 )
2477 lastparent = p1
2478 lastparent = p1
2478
2479
2479 if not patchname:
2480 if not patchname:
2480 patchname = self.makepatchname(
2481 patchname = self.makepatchname(
2481 repo[r].description().split(b'\n', 1)[0],
2482 repo[r].description().split(b'\n', 1)[0],
2482 b'%d.diff' % r,
2483 b'%d.diff' % r,
2483 )
2484 )
2484 checkseries(patchname)
2485 checkseries(patchname)
2485 self.checkpatchname(patchname, force)
2486 self.checkpatchname(patchname, force)
2486 self.fullseries.insert(0, patchname)
2487 self.fullseries.insert(0, patchname)
2487
2488
2488 with self.opener(patchname, b"w") as fp:
2489 with self.opener(patchname, b"w") as fp:
2489 cmdutil.exportfile(repo, [n], fp, opts=diffopts)
2490 cmdutil.exportfile(repo, [n], fp, opts=diffopts)
2490
2491
2491 se = statusentry(n, patchname)
2492 se = statusentry(n, patchname)
2492 self.applied.insert(0, se)
2493 self.applied.insert(0, se)
2493
2494
2494 self.added.append(patchname)
2495 self.added.append(patchname)
2495 imported.append(patchname)
2496 imported.append(patchname)
2496 patchname = None
2497 patchname = None
2497 if rev and repo.ui.configbool(b'mq', b'secret'):
2498 if rev and repo.ui.configbool(b'mq', b'secret'):
2498 # if we added anything with --rev, move the secret root
2499 # if we added anything with --rev, move the secret root
2499 phases.retractboundary(repo, tr, phases.secret, [n])
2500 phases.retractboundary(repo, tr, phases.secret, [n])
2500 self.parseseries()
2501 self.parseseries()
2501 self.applieddirty = True
2502 self.applieddirty = True
2502 self.seriesdirty = True
2503 self.seriesdirty = True
2503
2504
2504 for i, filename in enumerate(files):
2505 for i, filename in enumerate(files):
2505 if existing:
2506 if existing:
2506 if filename == b'-':
2507 if filename == b'-':
2507 raise error.Abort(
2508 raise error.Abort(
2508 _(b'-e is incompatible with import from -')
2509 _(b'-e is incompatible with import from -')
2509 )
2510 )
2510 filename = normname(filename)
2511 filename = normname(filename)
2511 self.checkreservedname(filename)
2512 self.checkreservedname(filename)
2512 if util.url(filename).islocal():
2513 if urlutil.url(filename).islocal():
2513 originpath = self.join(filename)
2514 originpath = self.join(filename)
2514 if not os.path.isfile(originpath):
2515 if not os.path.isfile(originpath):
2515 raise error.Abort(
2516 raise error.Abort(
2516 _(b"patch %s does not exist") % filename
2517 _(b"patch %s does not exist") % filename
2517 )
2518 )
2518
2519
2519 if patchname:
2520 if patchname:
2520 self.checkpatchname(patchname, force)
2521 self.checkpatchname(patchname, force)
2521
2522
2522 self.ui.write(
2523 self.ui.write(
2523 _(b'renaming %s to %s\n') % (filename, patchname)
2524 _(b'renaming %s to %s\n') % (filename, patchname)
2524 )
2525 )
2525 util.rename(originpath, self.join(patchname))
2526 util.rename(originpath, self.join(patchname))
2526 else:
2527 else:
2527 patchname = filename
2528 patchname = filename
2528
2529
2529 else:
2530 else:
2530 if filename == b'-' and not patchname:
2531 if filename == b'-' and not patchname:
2531 raise error.Abort(
2532 raise error.Abort(
2532 _(b'need --name to import a patch from -')
2533 _(b'need --name to import a patch from -')
2533 )
2534 )
2534 elif not patchname:
2535 elif not patchname:
2535 patchname = normname(
2536 patchname = normname(
2536 os.path.basename(filename.rstrip(b'/'))
2537 os.path.basename(filename.rstrip(b'/'))
2537 )
2538 )
2538 self.checkpatchname(patchname, force)
2539 self.checkpatchname(patchname, force)
2539 try:
2540 try:
2540 if filename == b'-':
2541 if filename == b'-':
2541 text = self.ui.fin.read()
2542 text = self.ui.fin.read()
2542 else:
2543 else:
2543 fp = hg.openpath(self.ui, filename)
2544 fp = hg.openpath(self.ui, filename)
2544 text = fp.read()
2545 text = fp.read()
2545 fp.close()
2546 fp.close()
2546 except (OSError, IOError):
2547 except (OSError, IOError):
2547 raise error.Abort(_(b"unable to read file %s") % filename)
2548 raise error.Abort(_(b"unable to read file %s") % filename)
2548 patchf = self.opener(patchname, b"w")
2549 patchf = self.opener(patchname, b"w")
2549 patchf.write(text)
2550 patchf.write(text)
2550 patchf.close()
2551 patchf.close()
2551 if not force:
2552 if not force:
2552 checkseries(patchname)
2553 checkseries(patchname)
2553 if patchname not in self.series:
2554 if patchname not in self.series:
2554 index = self.fullseriesend() + i
2555 index = self.fullseriesend() + i
2555 self.fullseries[index:index] = [patchname]
2556 self.fullseries[index:index] = [patchname]
2556 self.parseseries()
2557 self.parseseries()
2557 self.seriesdirty = True
2558 self.seriesdirty = True
2558 self.ui.warn(_(b"adding %s to series file\n") % patchname)
2559 self.ui.warn(_(b"adding %s to series file\n") % patchname)
2559 self.added.append(patchname)
2560 self.added.append(patchname)
2560 imported.append(patchname)
2561 imported.append(patchname)
2561 patchname = None
2562 patchname = None
2562
2563
2563 self.removeundo(repo)
2564 self.removeundo(repo)
2564 return imported
2565 return imported
2565
2566
2566
2567
def fixkeepchangesopts(ui, opts):
    """Inject ``keep_changes`` into *opts* when mq.keepchanges is configured.

    If the user did not set the ``mq.keepchanges`` config option, or
    explicitly passed --force or --exact, *opts* is returned unchanged.
    Otherwise a shallow copy of *opts* with ``keep_changes`` enabled is
    returned, so the caller's mapping is never mutated.
    """
    wants_keep = ui.configbool(b'mq', b'keepchanges')
    if not wants_keep or opts.get(b'force') or opts.get(b'exact'):
        return opts
    # copy before mutating: callers may reuse the original opts dict
    newopts = dict(opts)
    newopts[b'keep_changes'] = True
    return newopts
2577
2578
2578
2579
@command(
    b"qdelete|qremove|qrm",
    [
        (b'k', b'keep', None, _(b'keep patch file')),
        (
            b'r',
            b'rev',
            [],
            _(b'stop managing a revision (DEPRECATED)'),
            _(b'REV'),
        ),
    ],
    _(b'hg qdelete [-k] [PATCH]...'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # delegate to the queue object, then persist its updated state
    mq = repo.mq
    mq.delete(repo, patches, pycompat.byteskwargs(opts))
    mq.savedirty()
    return 0
2607
2608
2608
2609
@command(
    b"qapplied",
    [(b'1', b'last', None, _(b'show only the preceding applied patch'))]
    + seriesopts,
    _(b'hg qapplied [-1] [-s] [PATCH]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    mq = repo.mq
    opts = pycompat.byteskwargs(opts)

    # compute the end of the applied range: either up to the named
    # patch (inclusive) or up to the last applied patch
    if patch:
        if patch not in mq.series:
            raise error.Abort(_(b"patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    else:
        end = mq.seriesend(True)

    if opts.get(b'last'):
        # --last: show only the patch preceding the topmost one
        if not end:
            ui.write(_(b"no patches applied\n"))
            return 1
        if end == 1:
            ui.write(_(b"only one patch applied\n"))
            return 1
        start = end - 2
        end = 1
    else:
        start = 0

    mq.qseries(
        repo, length=end, start=start, status=b'A', summary=opts.get(b'summary')
    )
2646
2647
2647
2648
@command(
    b"qunapplied",
    [(b'1', b'first', None, _(b'show only the first patch'))] + seriesopts,
    _(b'hg qunapplied [-1] [-s] [PATCH]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    mq = repo.mq
    opts = pycompat.byteskwargs(opts)
    # starting point: just after the named patch, or after the last
    # applied patch when no patch argument was given
    if patch:
        if patch not in mq.series:
            raise error.Abort(_(b"patch %s is not in series file") % patch)
        start = mq.series.index(patch) + 1
    else:
        start = mq.seriesend(True)

    if start == len(mq.series) and opts.get(b'first'):
        ui.write(_(b"all patches applied\n"))
        return 1

    # --first limits output to a single patch; None means "to the end"
    length = 1 if opts.get(b'first') else None
    mq.qseries(
        repo,
        start=start,
        length=length,
        status=b'U',
        summary=opts.get(b'summary'),
    )
2683
2684
2684
2685
@command(
    b"qimport",
    [
        (b'e', b'existing', None, _(b'import file in patch directory')),
        (b'n', b'name', b'', _(b'name of patch file'), _(b'NAME')),
        (b'f', b'force', None, _(b'overwrite existing files')),
        (
            b'r',
            b'rev',
            [],
            _(b'place existing revisions under mq control'),
            _(b'REV'),
        ),
        (b'g', b'git', None, _(b'use git extended diff format')),
        (b'P', b'push', None, _(b'qpush after importing')),
    ],
    _(b'hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def qimport(ui, repo, *filename, **opts):
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev . -n patch will place the current revision
    under mq control). With -g/--git, patches imported with --rev will
    use the git diff format. See the diffs help topic for information
    on why this is important for preserving rename/copy information
    and permission changes. Use :hg:`qfinish` to remove changesets
    from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.lock():  # cause this may move phase
        mq = repo.mq
        try:
            imported = mq.qimport(
                repo,
                filename,
                patchname=opts.get(b'name'),
                existing=opts.get(b'existing'),
                force=opts.get(b'force'),
                rev=opts.get(b'rev'),
                git=opts.get(b'git'),
            )
        finally:
            # persist queue state even when qimport aborted part-way
            mq.savedirty()

    # -P/--push only makes sense for file imports; --rev patches are
    # already applied
    if imported and opts.get(b'push') and not opts.get(b'rev'):
        return mq.push(repo, imported[-1])
    return 0
2757
2758
2758
2759
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    mq = repo.mq
    qrepo = mq.init(repo, create)
    mq.savedirty()
    if not qrepo:
        return 0
    # seed the versioned patch repo with an .hgignore excluding mq's
    # transient bookkeeping files, plus an empty series file
    if not os.path.exists(qrepo.wjoin(b'.hgignore')):
        fp = qrepo.wvfs(b'.hgignore', b'w')
        fp.write(b'^\\.hg\n')
        fp.write(b'^\\.mq\n')
        fp.write(b'syntax: glob\n')
        fp.write(b'status\n')
        fp.write(b'guards\n')
        fp.close()
    if not os.path.exists(qrepo.wjoin(b'series')):
        qrepo.wvfs(b'series', b'w').close()
    qrepo[None].add([b'.hgignore', b'series'])
    commands.add(ui, qrepo)
    return 0
2784
2785
2785
2786
@command(
    b"qinit",
    [(b'c', b'create-repo', None, _(b'create queue repository'))],
    _(b'hg qinit [-c]'),
    helpcategory=command.CATEGORY_REPO_CREATION,
    helpbasic=True,
)
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # thin wrapper: all work happens in the module-level qinit() helper
    return qinit(ui, repo, create=opts.get('create_repo'))
2805
2806
2806
2807
@command(
    b"qclone",
    [
        (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
        (
            b'U',
            b'noupdate',
            None,
            _(b'do not update the new working directories'),
        ),
        (
            b'',
            b'uncompressed',
            None,
            _(b'use uncompressed transfer (fast over LAN)'),
        ),
        (
            b'p',
            b'patches',
            b'',
            _(b'location of source patch repository'),
            _(b'REPO'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'hg qclone [OPTION]... SOURCE [DEST]'),
    helpcategory=command.CATEGORY_REPO_CREATION,
    norepo=True,
)
def clone(ui, source, dest=None, **opts):
    """clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith(b'/'):
            url = url[:-1]
        return url + b'/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    srcpeer = hg.peer(ui, opts, ui.expandpath(source))

    # patches repo (source only): verify it exists before cloning anything
    if opts.get(b'patches'):
        patchespath = ui.expandpath(opts.get(b'patches'))
    else:
        patchespath = patchdir(srcpeer)
    try:
        hg.peer(ui, opts, patchespath)
    except error.RepoError:
        raise error.Abort(
            _(b'versioned patch repository not found (see init --mq)')
        )
    qbase, destrev = None, None
    if srcpeer.local():
        repo = srcpeer.local()
        # NOTE(review): qbase is still None here, so repo[qbase] is the
        # working context at this point — confirm this phase test is
        # intentional before changing it.
        if repo.mq.applied and repo[qbase].phase() != phases.secret:
            qbase = repo.mq.applied[0].node
            if not hg.islocal(dest):
                heads = set(repo.heads())
                destrev = list(heads.difference(repo.heads(qbase)))
                destrev.append(repo.changelog.parents(qbase)[0])
    elif srcpeer.capable(b'lookup'):
        try:
            qbase = srcpeer.lookup(b'qbase')
        except error.RepoError:
            pass

    ui.note(_(b'cloning main repository\n'))
    srcpeer, destpeer = hg.clone(
        ui,
        opts,
        srcpeer.url(),
        dest,
        pull=opts.get(b'pull'),
        revs=destrev,
        update=False,
        stream=opts.get(b'uncompressed'),
    )

    ui.note(_(b'cloning patch repository\n'))
    hg.clone(
        ui,
        opts,
        opts.get(b'patches') or patchdir(srcpeer),
        patchdir(destpeer),
        pull=opts.get(b'pull'),
        update=not opts.get(b'noupdate'),
        stream=opts.get(b'uncompressed'),
    )

    if destpeer.local():
        repo = destpeer.local()
        if qbase:
            # a local source had patches applied: remove them from the
            # destination so it starts with a clean (unapplied) queue
            ui.note(
                _(
                    b'stripping applied patches from destination '
                    b'repository\n'
                )
            )
            strip(ui, repo, [qbase], update=False, backup=None)
        if not opts.get(b'noupdate'):
            ui.note(_(b'updating destination repository\n'))
            hg.update(repo, repo.changelog.tip())
2929
2930
2930
2931
@command(
    b"qcommit|qci",
    commands.table[b"commit|ci"][1],
    _(b'hg qcommit [OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    inferrepo=True,
)
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    # run a regular commit, but inside the versioned patch repository
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise error.Abort(b'no queue repository')
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
2947
2948
2948
2949
@command(
    b"qseries",
    [
        (b'm', b'missing', None, _(b'print patches not in series')),
    ]
    + seriesopts,
    _(b'hg qseries [-ms]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    # opts keys are native strings here (no byteskwargs conversion)
    repo.mq.qseries(
        repo, missing=opts.get('missing'), summary=opts.get('summary')
    )
    return 0
2966
2967
2967
2968
@command(
    b"qtop",
    seriesopts,
    _(b'hg qtop [-s]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    mq = repo.mq
    # index one past the topmost applied patch, or 0 when none applied
    topidx = mq.seriesend(True) if mq.applied else 0

    if not topidx:
        ui.write(_(b"no patches applied\n"))
        return 1
    mq.qseries(
        repo,
        start=topidx - 1,
        length=1,
        status=b'A',
        summary=opts.get('summary'),
    )
2995
2996
2996
2997
@command(
    b"qnext",
    seriesopts,
    _(b'hg qnext [-s]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    mq = repo.mq
    # seriesend() is the index of the first unapplied patch
    pos = mq.seriesend()
    if pos == len(mq.series):
        ui.write(_(b"all patches applied\n"))
        return 1
    mq.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
3013
3014
3014
3015
@command(
    b"qprev",
    seriesopts,
    _(b'hg qprev [-s]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    mq = repo.mq
    napplied = len(mq.applied)
    if napplied == 1:
        ui.write(_(b"only one patch applied\n"))
        return 1
    if napplied == 0:
        ui.write(_(b"no patches applied\n"))
        return 1
    # The patch below the current top is the second-to-last applied one.
    below = mq.series.index(mq.applied[-2].name)
    mq.qseries(
        repo, start=below, length=1, status=b'A', summary=opts.get('summary')
    )
3037
3038
3038
3039
def setupheaderopts(ui, opts):
    """Fill in the b'user'/b'date' entries of *opts* in place.

    -U/--currentuser and -D/--currentdate only take effect when no
    explicit -u/-d value was given.
    """
    if opts.get(b'currentuser') and not opts.get(b'user'):
        opts[b'user'] = ui.username()
    if opts.get(b'currentdate') and not opts.get(b'date'):
        opts[b'date'] = b"%d %d" % dateutil.makedate()
3044
3045
3045
3046
@command(
    b"qnew",
    [
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (b'f', b'force', None, _(b'import uncommitted changes (DEPRECATED)')),
        (b'g', b'git', None, _(b'use git extended diff format')),
        (b'U', b'currentuser', None, _(b'add "From: <current user>" to patch')),
        (b'u', b'user', b'', _(b'add "From: <USER>" to patch'), _(b'USER')),
        (b'D', b'currentdate', None, _(b'add "Date: <current date>" to patch')),
        (b'd', b'date', b'', _(b'add "Date: <DATE>" to patch'), _(b'DATE')),
    ]
    + cmdutil.walkopts
    + cmdutil.commitopts,
    _(b'hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    helpbasic=True,
    inferrepo=True,
)
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    # Resolve -m/-l into the commit message before handing off to the queue.
    byteopts = pycompat.byteskwargs(opts)
    byteopts[b'msg'] = cmdutil.logmessage(ui, byteopts)
    setupheaderopts(ui, byteopts)
    mq = repo.mq
    mq.new(repo, patch, *args, **pycompat.strkwargs(byteopts))
    mq.savedirty()
    return 0
3097
3098
3098
3099
@command(
    b"qrefresh",
    [
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (b'g', b'git', None, _(b'use git extended diff format')),
        (
            b's',
            b'short',
            None,
            _(b'refresh only files already in the patch and specified files'),
        ),
        (
            b'U',
            b'currentuser',
            None,
            _(b'add/update author field in patch with current user'),
        ),
        (
            b'u',
            b'user',
            b'',
            _(b'add/update author field in patch with given user'),
            _(b'USER'),
        ),
        (
            b'D',
            b'currentdate',
            None,
            _(b'add/update date field in patch with current date'),
        ),
        (
            b'd',
            b'date',
            b'',
            _(b'add/update date field in patch with given date'),
            _(b'DATE'),
        ),
    ]
    + cmdutil.walkopts
    + cmdutil.commitopts,
    _(b'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    helpbasic=True,
    inferrepo=True,
)
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    byteopts = pycompat.byteskwargs(opts)
    mq = repo.mq
    commitmsg = cmdutil.logmessage(ui, byteopts)
    setupheaderopts(ui, byteopts)
    # Hold the working-copy lock across both the refresh and the queue save.
    with repo.wlock():
        ret = mq.refresh(
            repo, pats, msg=commitmsg, **pycompat.strkwargs(byteopts)
        )
        mq.savedirty()
        return ret
3173
3174
3174
3175
@command(
    b"qdiff",
    cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
    _(b'hg qdiff [OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    helpbasic=True,
    inferrepo=True,
)
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    ui.pager(b'qdiff')
    byteopts = pycompat.byteskwargs(opts)
    repo.mq.diff(repo, pats, byteopts)
    return 0
3201
3202
3202
3203
@command(
    b'qfold',
    [
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (b'k', b'keep', None, _(b'keep folded patch files')),
    ]
    + cmdutil.commitopts,
    _(b'hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    opts = pycompat.byteskwargs(opts)
    q = repo.mq
    if not files:
        raise error.Abort(_(b'qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise error.Abort(_(b'no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup(b'qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_(b'skipping already folded patch %s\n') % p)
            # BUGFIX: actually skip the duplicate. Previously the patch was
            # still appended to `patches` after the "skipping" warning, so a
            # patch named twice (or the current top) would be folded twice.
            continue
        if q.isapplied(p):
            raise error.Abort(
                _(b'qfold cannot fold already applied patch %s') % p
            )
        patches.append(p)

    for p in patches:
        if not message:
            # Without -m/-l, collect each folded patch's message so the
            # headers can be concatenated below.
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise error.Abort(_(b'error folding patch %s') % p)

    if not message:
        # Start from the current top's header and append each folded
        # patch's message, separated by '* * *'.
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append(b'* * *')
                message.extend(msg)
        message = b'\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    with repo.wlock():
        q.refresh(
            repo,
            msg=message,
            git=diffopts.git,
            edit=opts.get(b'edit'),
            editform=b'mq.qfold',
        )
        q.delete(repo, patches, opts)
        q.savedirty()
3281
3282
3282
3283
@command(
    b"qgoto",
    [
        (
            b'',
            b'keep-changes',
            None,
            _(b'tolerate non-conflicting local changes'),
        ),
        (b'f', b'force', None, _(b'overwrite any local changes')),
        (b'', b'no-backup', None, _(b'do not save backup copies of files')),
    ],
    _(b'hg qgoto [OPTION]... PATCH'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def goto(ui, repo, patch, **opts):
    """push or pop patches until named patch is at top of stack

    Returns 0 on success."""
    byteopts = fixkeepchangesopts(ui, pycompat.byteskwargs(opts))
    mq = repo.mq
    patch = mq.lookup(patch)
    # Pop if the target is already applied, otherwise push up to it; both
    # queue methods share the same keyword signature here.
    mover = mq.pop if mq.isapplied(patch) else mq.push
    ret = mover(
        repo,
        patch,
        force=byteopts.get(b'force'),
        nobackup=byteopts.get(b'no_backup'),
        keepchanges=byteopts.get(b'keep_changes'),
    )
    mq.savedirty()
    return ret
3326
3327
3327
3328
@command(
    b"qguard",
    [
        (b'l', b'list', None, _(b'list all patches and guards')),
        (b'n', b'none', None, _(b'drop all guards')),
    ],
    _(b'hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def guard(ui, repo, *args, **opts):
    """set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::

       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    """

    def status(idx):
        # Print series entry *idx* and its guards, colorized by state:
        # applied, pushable ('unapplied'), or blocked by a guard ('guarded').
        guards = q.seriesguards[idx] or [b'unguarded']
        if q.series[idx] in applied:
            state = b'applied'
        elif q.pushable(idx)[0]:
            state = b'unapplied'
        else:
            state = b'guarded'
        label = b'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write(b'%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            # Label each guard by its sign so color extensions can style it.
            if guard.startswith(b'+'):
                ui.write(guard, label=b'qguard.positive')
            elif guard.startswith(b'-'):
                ui.write(guard, label=b'qguard.negative')
            else:
                ui.write(guard, label=b'qguard.unguarded')
            if i != len(guards) - 1:
                # Single space between guards, no trailing separator.
                ui.write(b' ')
        ui.write(b'\n')

    q = repo.mq
    applied = {p.name for p in q.applied}
    patch = None
    args = list(args)
    if opts.get('list'):
        # -l/--list shows every patch; it cannot be combined with anything.
        if args or opts.get('none'):
            raise error.Abort(
                _(b'cannot mix -l/--list with options or arguments')
            )
        for i in pycompat.xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in b'-+':
        # No patch name given (first argument is a guard, or there are no
        # arguments at all): operate on the topmost applied patch.
        if not q.applied:
            raise error.Abort(_(b'no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in b'-+':
        # First argument is a patch name, not a guard: consume it.
        patch = args.pop(0)
    if patch is None:
        raise error.Abort(_(b'no patch to work with'))
    if args or opts.get('none'):
        # Setting guards; with -n/--none the (empty) args clear them all.
        idx = q.findseries(patch)
        if idx is None:
            raise error.Abort(_(b'no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        # No guard arguments: just display the patch's current guards.
        status(q.series.index(q.lookup(patch)))
3410
3411
3411
3412
@command(
    b"qheader",
    [],
    _(b'hg qheader [PATCH]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    mq = repo.mq

    if not patch:
        # Default to the topmost applied patch.
        if not mq.applied:
            ui.write(_(b'no patches applied\n'))
            return 1
        patch = b'qtip'
    name = mq.lookup(patch)
    ph = patchheader(mq.join(name), mq.plainmode)

    ui.write(b'\n'.join(ph.message) + b'\n')
3434
3435
3435
3436
def lastsavename(path):
    """Find the most recent saved-queue backup named ``path.N``.

    Scans the directory containing *path* (a bytes path) for entries whose
    name is the base name of *path* followed by a dot and a decimal index,
    and returns a ``(fullpath, index)`` pair for the highest index, or
    ``(None, None)`` when no such entry exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # Escape the base name and anchor the numeric suffix. The previous
    # pattern b"%s.([0-9]+)" treated regex metacharacters in `base` as
    # operators, matched any character where the dot should be (so
    # b'patchesX12' qualified), and accepted trailing garbage like
    # b'patches.12.bak'.
    namere = re.compile(br"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
3452
3453
3453
3454
def savename(path):
    """Return the next unused backup name for *path*, i.e. ``path.N+1``.

    N is the highest existing backup index (0 when none exist yet).
    """
    last, index = lastsavename(path)
    if last is None:
        index = 0
    return path + b".%d" % (index + 1)
3460
3461
3461
3462
@command(
    b"qpush",
    [
        (
            b'',
            b'keep-changes',
            None,
            _(b'tolerate non-conflicting local changes'),
        ),
        (b'f', b'force', None, _(b'apply on top of local changes')),
        (
            b'e',
            b'exact',
            None,
            _(b'apply the target patch to its recorded parent'),
        ),
        (b'l', b'list', None, _(b'list patch name in commit text')),
        (b'a', b'all', None, _(b'apply all patches')),
        (b'm', b'merge', None, _(b'merge from another queue (DEPRECATED)')),
        (b'n', b'name', b'', _(b'merge queue name (DEPRECATED)'), _(b'NAME')),
        (
            b'',
            b'move',
            None,
            _(b'reorder patch series and apply only the patch'),
        ),
        (b'', b'no-backup', None, _(b'do not save backup copies of files')),
    ],
    _(b'hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
    helpbasic=True,
)
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    mq = repo.mq
    mergeq = None

    byteopts = fixkeepchangesopts(ui, pycompat.byteskwargs(opts))
    if byteopts.get(b'merge'):
        # Deprecated -m: merge with a saved queue, either the one named via
        # -n or the most recently saved one next to the current queue.
        if byteopts.get(b'name'):
            savedpath = repo.vfs.join(byteopts.get(b'name'))
        else:
            savedpath, maxidx = lastsavename(mq.path)
        if not savedpath:
            ui.warn(_(b"no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.baseui, repo.path, savedpath)
        ui.warn(_(b"merging with queue at: %s\n") % mergeq.path)
    ret = mq.push(
        repo,
        patch,
        force=byteopts.get(b'force'),
        list=byteopts.get(b'list'),
        mergeq=mergeq,
        all=byteopts.get(b'all'),
        move=byteopts.get(b'move'),
        exact=byteopts.get(b'exact'),
        nobackup=byteopts.get(b'no_backup'),
        keepchanges=byteopts.get(b'keep_changes'),
    )
    return ret
3532
3533
3533
3534
3534 @command(
3535 @command(
3535 b"qpop",
3536 b"qpop",
3536 [
3537 [
3537 (b'a', b'all', None, _(b'pop all patches')),
3538 (b'a', b'all', None, _(b'pop all patches')),
3538 (b'n', b'name', b'', _(b'queue name to pop (DEPRECATED)'), _(b'NAME')),
3539 (b'n', b'name', b'', _(b'queue name to pop (DEPRECATED)'), _(b'NAME')),
3539 (
3540 (
3540 b'',
3541 b'',
3541 b'keep-changes',
3542 b'keep-changes',
3542 None,
3543 None,
3543 _(b'tolerate non-conflicting local changes'),
3544 _(b'tolerate non-conflicting local changes'),
3544 ),
3545 ),
3545 (b'f', b'force', None, _(b'forget any local changes to patched files')),
3546 (b'f', b'force', None, _(b'forget any local changes to patched files')),
3546 (b'', b'no-backup', None, _(b'do not save backup copies of files')),
3547 (b'', b'no-backup', None, _(b'do not save backup copies of files')),
3547 ],
3548 ],
3548 _(b'hg qpop [-a] [-f] [PATCH | INDEX]'),
3549 _(b'hg qpop [-a] [-f] [PATCH | INDEX]'),
3549 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3550 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3550 helpbasic=True,
3551 helpbasic=True,
3551 )
3552 )
3552 def pop(ui, repo, patch=None, **opts):
3553 def pop(ui, repo, patch=None, **opts):
3553 """pop the current patch off the stack
3554 """pop the current patch off the stack
3554
3555
3555 Without argument, pops off the top of the patch stack. If given a
3556 Without argument, pops off the top of the patch stack. If given a
3556 patch name, keeps popping off patches until the named patch is at
3557 patch name, keeps popping off patches until the named patch is at
3557 the top of the stack.
3558 the top of the stack.
3558
3559
3559 By default, abort if the working directory contains uncommitted
3560 By default, abort if the working directory contains uncommitted
3560 changes. With --keep-changes, abort only if the uncommitted files
3561 changes. With --keep-changes, abort only if the uncommitted files
3561 overlap with patched files. With -f/--force, backup and discard
3562 overlap with patched files. With -f/--force, backup and discard
3562 changes made to such files.
3563 changes made to such files.
3563
3564
3564 Return 0 on success.
3565 Return 0 on success.
3565 """
3566 """
3566 opts = pycompat.byteskwargs(opts)
3567 opts = pycompat.byteskwargs(opts)
3567 opts = fixkeepchangesopts(ui, opts)
3568 opts = fixkeepchangesopts(ui, opts)
3568 localupdate = True
3569 localupdate = True
3569 if opts.get(b'name'):
3570 if opts.get(b'name'):
3570 q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get(b'name')))
3571 q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get(b'name')))
3571 ui.warn(_(b'using patch queue: %s\n') % q.path)
3572 ui.warn(_(b'using patch queue: %s\n') % q.path)
3572 localupdate = False
3573 localupdate = False
3573 else:
3574 else:
3574 q = repo.mq
3575 q = repo.mq
3575 ret = q.pop(
3576 ret = q.pop(
3576 repo,
3577 repo,
3577 patch,
3578 patch,
3578 force=opts.get(b'force'),
3579 force=opts.get(b'force'),
3579 update=localupdate,
3580 update=localupdate,
3580 all=opts.get(b'all'),
3581 all=opts.get(b'all'),
3581 nobackup=opts.get(b'no_backup'),
3582 nobackup=opts.get(b'no_backup'),
3582 keepchanges=opts.get(b'keep_changes'),
3583 keepchanges=opts.get(b'keep_changes'),
3583 )
3584 )
3584 q.savedirty()
3585 q.savedirty()
3585 return ret
3586 return ret
3586
3587
3587
3588
3588 @command(
3589 @command(
3589 b"qrename|qmv",
3590 b"qrename|qmv",
3590 [],
3591 [],
3591 _(b'hg qrename PATCH1 [PATCH2]'),
3592 _(b'hg qrename PATCH1 [PATCH2]'),
3592 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3593 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3593 )
3594 )
3594 def rename(ui, repo, patch, name=None, **opts):
3595 def rename(ui, repo, patch, name=None, **opts):
3595 """rename a patch
3596 """rename a patch
3596
3597
3597 With one argument, renames the current patch to PATCH1.
3598 With one argument, renames the current patch to PATCH1.
3598 With two arguments, renames PATCH1 to PATCH2.
3599 With two arguments, renames PATCH1 to PATCH2.
3599
3600
3600 Returns 0 on success."""
3601 Returns 0 on success."""
3601 q = repo.mq
3602 q = repo.mq
3602 if not name:
3603 if not name:
3603 name = patch
3604 name = patch
3604 patch = None
3605 patch = None
3605
3606
3606 if patch:
3607 if patch:
3607 patch = q.lookup(patch)
3608 patch = q.lookup(patch)
3608 else:
3609 else:
3609 if not q.applied:
3610 if not q.applied:
3610 ui.write(_(b'no patches applied\n'))
3611 ui.write(_(b'no patches applied\n'))
3611 return
3612 return
3612 patch = q.lookup(b'qtip')
3613 patch = q.lookup(b'qtip')
3613 absdest = q.join(name)
3614 absdest = q.join(name)
3614 if os.path.isdir(absdest):
3615 if os.path.isdir(absdest):
3615 name = normname(os.path.join(name, os.path.basename(patch)))
3616 name = normname(os.path.join(name, os.path.basename(patch)))
3616 absdest = q.join(name)
3617 absdest = q.join(name)
3617 q.checkpatchname(name)
3618 q.checkpatchname(name)
3618
3619
3619 ui.note(_(b'renaming %s to %s\n') % (patch, name))
3620 ui.note(_(b'renaming %s to %s\n') % (patch, name))
3620 i = q.findseries(patch)
3621 i = q.findseries(patch)
3621 guards = q.guard_re.findall(q.fullseries[i])
3622 guards = q.guard_re.findall(q.fullseries[i])
3622 q.fullseries[i] = name + b''.join([b' #' + g for g in guards])
3623 q.fullseries[i] = name + b''.join([b' #' + g for g in guards])
3623 q.parseseries()
3624 q.parseseries()
3624 q.seriesdirty = True
3625 q.seriesdirty = True
3625
3626
3626 info = q.isapplied(patch)
3627 info = q.isapplied(patch)
3627 if info:
3628 if info:
3628 q.applied[info[0]] = statusentry(info[1], name)
3629 q.applied[info[0]] = statusentry(info[1], name)
3629 q.applieddirty = True
3630 q.applieddirty = True
3630
3631
3631 destdir = os.path.dirname(absdest)
3632 destdir = os.path.dirname(absdest)
3632 if not os.path.isdir(destdir):
3633 if not os.path.isdir(destdir):
3633 os.makedirs(destdir)
3634 os.makedirs(destdir)
3634 util.rename(q.join(patch), absdest)
3635 util.rename(q.join(patch), absdest)
3635 r = q.qrepo()
3636 r = q.qrepo()
3636 if r and patch in r.dirstate:
3637 if r and patch in r.dirstate:
3637 wctx = r[None]
3638 wctx = r[None]
3638 with r.wlock():
3639 with r.wlock():
3639 if r.dirstate[patch] == b'a':
3640 if r.dirstate[patch] == b'a':
3640 r.dirstate.drop(patch)
3641 r.dirstate.drop(patch)
3641 r.dirstate.add(name)
3642 r.dirstate.add(name)
3642 else:
3643 else:
3643 wctx.copy(patch, name)
3644 wctx.copy(patch, name)
3644 wctx.forget([patch])
3645 wctx.forget([patch])
3645
3646
3646 q.savedirty()
3647 q.savedirty()
3647
3648
3648
3649
3649 @command(
3650 @command(
3650 b"qrestore",
3651 b"qrestore",
3651 [
3652 [
3652 (b'd', b'delete', None, _(b'delete save entry')),
3653 (b'd', b'delete', None, _(b'delete save entry')),
3653 (b'u', b'update', None, _(b'update queue working directory')),
3654 (b'u', b'update', None, _(b'update queue working directory')),
3654 ],
3655 ],
3655 _(b'hg qrestore [-d] [-u] REV'),
3656 _(b'hg qrestore [-d] [-u] REV'),
3656 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3657 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3657 )
3658 )
3658 def restore(ui, repo, rev, **opts):
3659 def restore(ui, repo, rev, **opts):
3659 """restore the queue state saved by a revision (DEPRECATED)
3660 """restore the queue state saved by a revision (DEPRECATED)
3660
3661
3661 This command is deprecated, use :hg:`rebase` instead."""
3662 This command is deprecated, use :hg:`rebase` instead."""
3662 rev = repo.lookup(rev)
3663 rev = repo.lookup(rev)
3663 q = repo.mq
3664 q = repo.mq
3664 q.restore(repo, rev, delete=opts.get('delete'), qupdate=opts.get('update'))
3665 q.restore(repo, rev, delete=opts.get('delete'), qupdate=opts.get('update'))
3665 q.savedirty()
3666 q.savedirty()
3666 return 0
3667 return 0
3667
3668
3668
3669
3669 @command(
3670 @command(
3670 b"qsave",
3671 b"qsave",
3671 [
3672 [
3672 (b'c', b'copy', None, _(b'copy patch directory')),
3673 (b'c', b'copy', None, _(b'copy patch directory')),
3673 (b'n', b'name', b'', _(b'copy directory name'), _(b'NAME')),
3674 (b'n', b'name', b'', _(b'copy directory name'), _(b'NAME')),
3674 (b'e', b'empty', None, _(b'clear queue status file')),
3675 (b'e', b'empty', None, _(b'clear queue status file')),
3675 (b'f', b'force', None, _(b'force copy')),
3676 (b'f', b'force', None, _(b'force copy')),
3676 ]
3677 ]
3677 + cmdutil.commitopts,
3678 + cmdutil.commitopts,
3678 _(b'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
3679 _(b'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
3679 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3680 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3680 )
3681 )
3681 def save(ui, repo, **opts):
3682 def save(ui, repo, **opts):
3682 """save current queue state (DEPRECATED)
3683 """save current queue state (DEPRECATED)
3683
3684
3684 This command is deprecated, use :hg:`rebase` instead."""
3685 This command is deprecated, use :hg:`rebase` instead."""
3685 q = repo.mq
3686 q = repo.mq
3686 opts = pycompat.byteskwargs(opts)
3687 opts = pycompat.byteskwargs(opts)
3687 message = cmdutil.logmessage(ui, opts)
3688 message = cmdutil.logmessage(ui, opts)
3688 ret = q.save(repo, msg=message)
3689 ret = q.save(repo, msg=message)
3689 if ret:
3690 if ret:
3690 return ret
3691 return ret
3691 q.savedirty() # save to .hg/patches before copying
3692 q.savedirty() # save to .hg/patches before copying
3692 if opts.get(b'copy'):
3693 if opts.get(b'copy'):
3693 path = q.path
3694 path = q.path
3694 if opts.get(b'name'):
3695 if opts.get(b'name'):
3695 newpath = os.path.join(q.basepath, opts.get(b'name'))
3696 newpath = os.path.join(q.basepath, opts.get(b'name'))
3696 if os.path.exists(newpath):
3697 if os.path.exists(newpath):
3697 if not os.path.isdir(newpath):
3698 if not os.path.isdir(newpath):
3698 raise error.Abort(
3699 raise error.Abort(
3699 _(b'destination %s exists and is not a directory')
3700 _(b'destination %s exists and is not a directory')
3700 % newpath
3701 % newpath
3701 )
3702 )
3702 if not opts.get(b'force'):
3703 if not opts.get(b'force'):
3703 raise error.Abort(
3704 raise error.Abort(
3704 _(b'destination %s exists, use -f to force') % newpath
3705 _(b'destination %s exists, use -f to force') % newpath
3705 )
3706 )
3706 else:
3707 else:
3707 newpath = savename(path)
3708 newpath = savename(path)
3708 ui.warn(_(b"copy %s to %s\n") % (path, newpath))
3709 ui.warn(_(b"copy %s to %s\n") % (path, newpath))
3709 util.copyfiles(path, newpath)
3710 util.copyfiles(path, newpath)
3710 if opts.get(b'empty'):
3711 if opts.get(b'empty'):
3711 del q.applied[:]
3712 del q.applied[:]
3712 q.applieddirty = True
3713 q.applieddirty = True
3713 q.savedirty()
3714 q.savedirty()
3714 return 0
3715 return 0
3715
3716
3716
3717
3717 @command(
3718 @command(
3718 b"qselect",
3719 b"qselect",
3719 [
3720 [
3720 (b'n', b'none', None, _(b'disable all guards')),
3721 (b'n', b'none', None, _(b'disable all guards')),
3721 (b's', b'series', None, _(b'list all guards in series file')),
3722 (b's', b'series', None, _(b'list all guards in series file')),
3722 (b'', b'pop', None, _(b'pop to before first guarded applied patch')),
3723 (b'', b'pop', None, _(b'pop to before first guarded applied patch')),
3723 (b'', b'reapply', None, _(b'pop, then reapply patches')),
3724 (b'', b'reapply', None, _(b'pop, then reapply patches')),
3724 ],
3725 ],
3725 _(b'hg qselect [OPTION]... [GUARD]...'),
3726 _(b'hg qselect [OPTION]... [GUARD]...'),
3726 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3727 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3727 )
3728 )
3728 def select(ui, repo, *args, **opts):
3729 def select(ui, repo, *args, **opts):
3729 """set or print guarded patches to push
3730 """set or print guarded patches to push
3730
3731
3731 Use the :hg:`qguard` command to set or print guards on patch, then use
3732 Use the :hg:`qguard` command to set or print guards on patch, then use
3732 qselect to tell mq which guards to use. A patch will be pushed if
3733 qselect to tell mq which guards to use. A patch will be pushed if
3733 it has no guards or any positive guards match the currently
3734 it has no guards or any positive guards match the currently
3734 selected guard, but will not be pushed if any negative guards
3735 selected guard, but will not be pushed if any negative guards
3735 match the current guard. For example::
3736 match the current guard. For example::
3736
3737
3737 qguard foo.patch -- -stable (negative guard)
3738 qguard foo.patch -- -stable (negative guard)
3738 qguard bar.patch +stable (positive guard)
3739 qguard bar.patch +stable (positive guard)
3739 qselect stable
3740 qselect stable
3740
3741
3741 This activates the "stable" guard. mq will skip foo.patch (because
3742 This activates the "stable" guard. mq will skip foo.patch (because
3742 it has a negative match) but push bar.patch (because it has a
3743 it has a negative match) but push bar.patch (because it has a
3743 positive match).
3744 positive match).
3744
3745
3745 With no arguments, prints the currently active guards.
3746 With no arguments, prints the currently active guards.
3746 With one argument, sets the active guard.
3747 With one argument, sets the active guard.
3747
3748
3748 Use -n/--none to deactivate guards (no other arguments needed).
3749 Use -n/--none to deactivate guards (no other arguments needed).
3749 When no guards are active, patches with positive guards are
3750 When no guards are active, patches with positive guards are
3750 skipped and patches with negative guards are pushed.
3751 skipped and patches with negative guards are pushed.
3751
3752
3752 qselect can change the guards on applied patches. It does not pop
3753 qselect can change the guards on applied patches. It does not pop
3753 guarded patches by default. Use --pop to pop back to the last
3754 guarded patches by default. Use --pop to pop back to the last
3754 applied patch that is not guarded. Use --reapply (which implies
3755 applied patch that is not guarded. Use --reapply (which implies
3755 --pop) to push back to the current patch afterwards, but skip
3756 --pop) to push back to the current patch afterwards, but skip
3756 guarded patches.
3757 guarded patches.
3757
3758
3758 Use -s/--series to print a list of all guards in the series file
3759 Use -s/--series to print a list of all guards in the series file
3759 (no other arguments needed). Use -v for more information.
3760 (no other arguments needed). Use -v for more information.
3760
3761
3761 Returns 0 on success."""
3762 Returns 0 on success."""
3762
3763
3763 q = repo.mq
3764 q = repo.mq
3764 opts = pycompat.byteskwargs(opts)
3765 opts = pycompat.byteskwargs(opts)
3765 guards = q.active()
3766 guards = q.active()
3766 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3767 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3767 if args or opts.get(b'none'):
3768 if args or opts.get(b'none'):
3768 old_unapplied = q.unapplied(repo)
3769 old_unapplied = q.unapplied(repo)
3769 old_guarded = [
3770 old_guarded = [
3770 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
3771 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
3771 ]
3772 ]
3772 q.setactive(args)
3773 q.setactive(args)
3773 q.savedirty()
3774 q.savedirty()
3774 if not args:
3775 if not args:
3775 ui.status(_(b'guards deactivated\n'))
3776 ui.status(_(b'guards deactivated\n'))
3776 if not opts.get(b'pop') and not opts.get(b'reapply'):
3777 if not opts.get(b'pop') and not opts.get(b'reapply'):
3777 unapplied = q.unapplied(repo)
3778 unapplied = q.unapplied(repo)
3778 guarded = [
3779 guarded = [
3779 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
3780 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
3780 ]
3781 ]
3781 if len(unapplied) != len(old_unapplied):
3782 if len(unapplied) != len(old_unapplied):
3782 ui.status(
3783 ui.status(
3783 _(
3784 _(
3784 b'number of unguarded, unapplied patches has '
3785 b'number of unguarded, unapplied patches has '
3785 b'changed from %d to %d\n'
3786 b'changed from %d to %d\n'
3786 )
3787 )
3787 % (len(old_unapplied), len(unapplied))
3788 % (len(old_unapplied), len(unapplied))
3788 )
3789 )
3789 if len(guarded) != len(old_guarded):
3790 if len(guarded) != len(old_guarded):
3790 ui.status(
3791 ui.status(
3791 _(
3792 _(
3792 b'number of guarded, applied patches has changed '
3793 b'number of guarded, applied patches has changed '
3793 b'from %d to %d\n'
3794 b'from %d to %d\n'
3794 )
3795 )
3795 % (len(old_guarded), len(guarded))
3796 % (len(old_guarded), len(guarded))
3796 )
3797 )
3797 elif opts.get(b'series'):
3798 elif opts.get(b'series'):
3798 guards = {}
3799 guards = {}
3799 noguards = 0
3800 noguards = 0
3800 for gs in q.seriesguards:
3801 for gs in q.seriesguards:
3801 if not gs:
3802 if not gs:
3802 noguards += 1
3803 noguards += 1
3803 for g in gs:
3804 for g in gs:
3804 guards.setdefault(g, 0)
3805 guards.setdefault(g, 0)
3805 guards[g] += 1
3806 guards[g] += 1
3806 if ui.verbose:
3807 if ui.verbose:
3807 guards[b'NONE'] = noguards
3808 guards[b'NONE'] = noguards
3808 guards = list(guards.items())
3809 guards = list(guards.items())
3809 guards.sort(key=lambda x: x[0][1:])
3810 guards.sort(key=lambda x: x[0][1:])
3810 if guards:
3811 if guards:
3811 ui.note(_(b'guards in series file:\n'))
3812 ui.note(_(b'guards in series file:\n'))
3812 for guard, count in guards:
3813 for guard, count in guards:
3813 ui.note(b'%2d ' % count)
3814 ui.note(b'%2d ' % count)
3814 ui.write(guard, b'\n')
3815 ui.write(guard, b'\n')
3815 else:
3816 else:
3816 ui.note(_(b'no guards in series file\n'))
3817 ui.note(_(b'no guards in series file\n'))
3817 else:
3818 else:
3818 if guards:
3819 if guards:
3819 ui.note(_(b'active guards:\n'))
3820 ui.note(_(b'active guards:\n'))
3820 for g in guards:
3821 for g in guards:
3821 ui.write(g, b'\n')
3822 ui.write(g, b'\n')
3822 else:
3823 else:
3823 ui.write(_(b'no active guards\n'))
3824 ui.write(_(b'no active guards\n'))
3824 reapply = opts.get(b'reapply') and q.applied and q.applied[-1].name
3825 reapply = opts.get(b'reapply') and q.applied and q.applied[-1].name
3825 popped = False
3826 popped = False
3826 if opts.get(b'pop') or opts.get(b'reapply'):
3827 if opts.get(b'pop') or opts.get(b'reapply'):
3827 for i in pycompat.xrange(len(q.applied)):
3828 for i in pycompat.xrange(len(q.applied)):
3828 if not pushable(i):
3829 if not pushable(i):
3829 ui.status(_(b'popping guarded patches\n'))
3830 ui.status(_(b'popping guarded patches\n'))
3830 popped = True
3831 popped = True
3831 if i == 0:
3832 if i == 0:
3832 q.pop(repo, all=True)
3833 q.pop(repo, all=True)
3833 else:
3834 else:
3834 q.pop(repo, q.applied[i - 1].name)
3835 q.pop(repo, q.applied[i - 1].name)
3835 break
3836 break
3836 if popped:
3837 if popped:
3837 try:
3838 try:
3838 if reapply:
3839 if reapply:
3839 ui.status(_(b'reapplying unguarded patches\n'))
3840 ui.status(_(b'reapplying unguarded patches\n'))
3840 q.push(repo, reapply)
3841 q.push(repo, reapply)
3841 finally:
3842 finally:
3842 q.savedirty()
3843 q.savedirty()
3843
3844
3844
3845
3845 @command(
3846 @command(
3846 b"qfinish",
3847 b"qfinish",
3847 [(b'a', b'applied', None, _(b'finish all applied changesets'))],
3848 [(b'a', b'applied', None, _(b'finish all applied changesets'))],
3848 _(b'hg qfinish [-a] [REV]...'),
3849 _(b'hg qfinish [-a] [REV]...'),
3849 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3850 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3850 )
3851 )
3851 def finish(ui, repo, *revrange, **opts):
3852 def finish(ui, repo, *revrange, **opts):
3852 """move applied patches into repository history
3853 """move applied patches into repository history
3853
3854
3854 Finishes the specified revisions (corresponding to applied
3855 Finishes the specified revisions (corresponding to applied
3855 patches) by moving them out of mq control into regular repository
3856 patches) by moving them out of mq control into regular repository
3856 history.
3857 history.
3857
3858
3858 Accepts a revision range or the -a/--applied option. If --applied
3859 Accepts a revision range or the -a/--applied option. If --applied
3859 is specified, all applied mq revisions are removed from mq
3860 is specified, all applied mq revisions are removed from mq
3860 control. Otherwise, the given revisions must be at the base of the
3861 control. Otherwise, the given revisions must be at the base of the
3861 stack of applied patches.
3862 stack of applied patches.
3862
3863
3863 This can be especially useful if your changes have been applied to
3864 This can be especially useful if your changes have been applied to
3864 an upstream repository, or if you are about to push your changes
3865 an upstream repository, or if you are about to push your changes
3865 to upstream.
3866 to upstream.
3866
3867
3867 Returns 0 on success.
3868 Returns 0 on success.
3868 """
3869 """
3869 if not opts.get('applied') and not revrange:
3870 if not opts.get('applied') and not revrange:
3870 raise error.Abort(_(b'no revisions specified'))
3871 raise error.Abort(_(b'no revisions specified'))
3871 elif opts.get('applied'):
3872 elif opts.get('applied'):
3872 revrange = (b'qbase::qtip',) + revrange
3873 revrange = (b'qbase::qtip',) + revrange
3873
3874
3874 q = repo.mq
3875 q = repo.mq
3875 if not q.applied:
3876 if not q.applied:
3876 ui.status(_(b'no patches applied\n'))
3877 ui.status(_(b'no patches applied\n'))
3877 return 0
3878 return 0
3878
3879
3879 revs = scmutil.revrange(repo, revrange)
3880 revs = scmutil.revrange(repo, revrange)
3880 if repo[b'.'].rev() in revs and repo[None].files():
3881 if repo[b'.'].rev() in revs and repo[None].files():
3881 ui.warn(_(b'warning: uncommitted changes in the working directory\n'))
3882 ui.warn(_(b'warning: uncommitted changes in the working directory\n'))
3882 # queue.finish may changes phases but leave the responsibility to lock the
3883 # queue.finish may changes phases but leave the responsibility to lock the
3883 # repo to the caller to avoid deadlock with wlock. This command code is
3884 # repo to the caller to avoid deadlock with wlock. This command code is
3884 # responsibility for this locking.
3885 # responsibility for this locking.
3885 with repo.lock():
3886 with repo.lock():
3886 q.finish(repo, revs)
3887 q.finish(repo, revs)
3887 q.savedirty()
3888 q.savedirty()
3888 return 0
3889 return 0
3889
3890
3890
3891
3891 @command(
3892 @command(
3892 b"qqueue",
3893 b"qqueue",
3893 [
3894 [
3894 (b'l', b'list', False, _(b'list all available queues')),
3895 (b'l', b'list', False, _(b'list all available queues')),
3895 (b'', b'active', False, _(b'print name of active queue')),
3896 (b'', b'active', False, _(b'print name of active queue')),
3896 (b'c', b'create', False, _(b'create new queue')),
3897 (b'c', b'create', False, _(b'create new queue')),
3897 (b'', b'rename', False, _(b'rename active queue')),
3898 (b'', b'rename', False, _(b'rename active queue')),
3898 (b'', b'delete', False, _(b'delete reference to queue')),
3899 (b'', b'delete', False, _(b'delete reference to queue')),
3899 (b'', b'purge', False, _(b'delete queue, and remove patch dir')),
3900 (b'', b'purge', False, _(b'delete queue, and remove patch dir')),
3900 ],
3901 ],
3901 _(b'[OPTION] [QUEUE]'),
3902 _(b'[OPTION] [QUEUE]'),
3902 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3903 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3903 )
3904 )
3904 def qqueue(ui, repo, name=None, **opts):
3905 def qqueue(ui, repo, name=None, **opts):
3905 """manage multiple patch queues
3906 """manage multiple patch queues
3906
3907
3907 Supports switching between different patch queues, as well as creating
3908 Supports switching between different patch queues, as well as creating
3908 new patch queues and deleting existing ones.
3909 new patch queues and deleting existing ones.
3909
3910
3910 Omitting a queue name or specifying -l/--list will show you the registered
3911 Omitting a queue name or specifying -l/--list will show you the registered
3911 queues - by default the "normal" patches queue is registered. The currently
3912 queues - by default the "normal" patches queue is registered. The currently
3912 active queue will be marked with "(active)". Specifying --active will print
3913 active queue will be marked with "(active)". Specifying --active will print
3913 only the name of the active queue.
3914 only the name of the active queue.
3914
3915
3915 To create a new queue, use -c/--create. The queue is automatically made
3916 To create a new queue, use -c/--create. The queue is automatically made
3916 active, except in the case where there are applied patches from the
3917 active, except in the case where there are applied patches from the
3917 currently active queue in the repository. Then the queue will only be
3918 currently active queue in the repository. Then the queue will only be
3918 created and switching will fail.
3919 created and switching will fail.
3919
3920
3920 To delete an existing queue, use --delete. You cannot delete the currently
3921 To delete an existing queue, use --delete. You cannot delete the currently
3921 active queue.
3922 active queue.
3922
3923
3923 Returns 0 on success.
3924 Returns 0 on success.
3924 """
3925 """
3925 q = repo.mq
3926 q = repo.mq
3926 _defaultqueue = b'patches'
3927 _defaultqueue = b'patches'
3927 _allqueues = b'patches.queues'
3928 _allqueues = b'patches.queues'
3928 _activequeue = b'patches.queue'
3929 _activequeue = b'patches.queue'
3929
3930
3930 def _getcurrent():
3931 def _getcurrent():
3931 cur = os.path.basename(q.path)
3932 cur = os.path.basename(q.path)
3932 if cur.startswith(b'patches-'):
3933 if cur.startswith(b'patches-'):
3933 cur = cur[8:]
3934 cur = cur[8:]
3934 return cur
3935 return cur
3935
3936
3936 def _noqueues():
3937 def _noqueues():
3937 try:
3938 try:
3938 fh = repo.vfs(_allqueues, b'r')
3939 fh = repo.vfs(_allqueues, b'r')
3939 fh.close()
3940 fh.close()
3940 except IOError:
3941 except IOError:
3941 return True
3942 return True
3942
3943
3943 return False
3944 return False
3944
3945
3945 def _getqueues():
3946 def _getqueues():
3946 current = _getcurrent()
3947 current = _getcurrent()
3947
3948
3948 try:
3949 try:
3949 fh = repo.vfs(_allqueues, b'r')
3950 fh = repo.vfs(_allqueues, b'r')
3950 queues = [queue.strip() for queue in fh if queue.strip()]
3951 queues = [queue.strip() for queue in fh if queue.strip()]
3951 fh.close()
3952 fh.close()
3952 if current not in queues:
3953 if current not in queues:
3953 queues.append(current)
3954 queues.append(current)
3954 except IOError:
3955 except IOError:
3955 queues = [_defaultqueue]
3956 queues = [_defaultqueue]
3956
3957
3957 return sorted(queues)
3958 return sorted(queues)
3958
3959
3959 def _setactive(name):
3960 def _setactive(name):
3960 if q.applied:
3961 if q.applied:
3961 raise error.Abort(
3962 raise error.Abort(
3962 _(
3963 _(
3963 b'new queue created, but cannot make active '
3964 b'new queue created, but cannot make active '
3964 b'as patches are applied'
3965 b'as patches are applied'
3965 )
3966 )
3966 )
3967 )
3967 _setactivenocheck(name)
3968 _setactivenocheck(name)
3968
3969
3969 def _setactivenocheck(name):
3970 def _setactivenocheck(name):
3970 fh = repo.vfs(_activequeue, b'w')
3971 fh = repo.vfs(_activequeue, b'w')
3971 if name != b'patches':
3972 if name != b'patches':
3972 fh.write(name)
3973 fh.write(name)
3973 fh.close()
3974 fh.close()
3974
3975
3975 def _addqueue(name):
3976 def _addqueue(name):
3976 fh = repo.vfs(_allqueues, b'a')
3977 fh = repo.vfs(_allqueues, b'a')
3977 fh.write(b'%s\n' % (name,))
3978 fh.write(b'%s\n' % (name,))
3978 fh.close()
3979 fh.close()
3979
3980
3980 def _queuedir(name):
3981 def _queuedir(name):
3981 if name == b'patches':
3982 if name == b'patches':
3982 return repo.vfs.join(b'patches')
3983 return repo.vfs.join(b'patches')
3983 else:
3984 else:
3984 return repo.vfs.join(b'patches-' + name)
3985 return repo.vfs.join(b'patches-' + name)
3985
3986
3986 def _validname(name):
3987 def _validname(name):
3987 for n in name:
3988 for n in name:
3988 if n in b':\\/.':
3989 if n in b':\\/.':
3989 return False
3990 return False
3990 return True
3991 return True
3991
3992
3992 def _delete(name):
3993 def _delete(name):
3993 if name not in existing:
3994 if name not in existing:
3994 raise error.Abort(_(b'cannot delete queue that does not exist'))
3995 raise error.Abort(_(b'cannot delete queue that does not exist'))
3995
3996
3996 current = _getcurrent()
3997 current = _getcurrent()
3997
3998
3998 if name == current:
3999 if name == current:
3999 raise error.Abort(_(b'cannot delete currently active queue'))
4000 raise error.Abort(_(b'cannot delete currently active queue'))
4000
4001
4001 fh = repo.vfs(b'patches.queues.new', b'w')
4002 fh = repo.vfs(b'patches.queues.new', b'w')
4002 for queue in existing:
4003 for queue in existing:
4003 if queue == name:
4004 if queue == name:
4004 continue
4005 continue
4005 fh.write(b'%s\n' % (queue,))
4006 fh.write(b'%s\n' % (queue,))
4006 fh.close()
4007 fh.close()
4007 repo.vfs.rename(b'patches.queues.new', _allqueues)
4008 repo.vfs.rename(b'patches.queues.new', _allqueues)
4008
4009
4009 opts = pycompat.byteskwargs(opts)
4010 opts = pycompat.byteskwargs(opts)
4010 if not name or opts.get(b'list') or opts.get(b'active'):
4011 if not name or opts.get(b'list') or opts.get(b'active'):
4011 current = _getcurrent()
4012 current = _getcurrent()
4012 if opts.get(b'active'):
4013 if opts.get(b'active'):
4013 ui.write(b'%s\n' % (current,))
4014 ui.write(b'%s\n' % (current,))
4014 return
4015 return
4015 for queue in _getqueues():
4016 for queue in _getqueues():
4016 ui.write(b'%s' % (queue,))
4017 ui.write(b'%s' % (queue,))
4017 if queue == current and not ui.quiet:
4018 if queue == current and not ui.quiet:
4018 ui.write(_(b' (active)\n'))
4019 ui.write(_(b' (active)\n'))
4019 else:
4020 else:
4020 ui.write(b'\n')
4021 ui.write(b'\n')
4021 return
4022 return
4022
4023
4023 if not _validname(name):
4024 if not _validname(name):
4024 raise error.Abort(
4025 raise error.Abort(
4025 _(b'invalid queue name, may not contain the characters ":\\/."')
4026 _(b'invalid queue name, may not contain the characters ":\\/."')
4026 )
4027 )
4027
4028
4028 with repo.wlock():
4029 with repo.wlock():
4029 existing = _getqueues()
4030 existing = _getqueues()
4030
4031
4031 if opts.get(b'create'):
4032 if opts.get(b'create'):
4032 if name in existing:
4033 if name in existing:
4033 raise error.Abort(_(b'queue "%s" already exists') % name)
4034 raise error.Abort(_(b'queue "%s" already exists') % name)
4034 if _noqueues():
4035 if _noqueues():
4035 _addqueue(_defaultqueue)
4036 _addqueue(_defaultqueue)
4036 _addqueue(name)
4037 _addqueue(name)
4037 _setactive(name)
4038 _setactive(name)
4038 elif opts.get(b'rename'):
4039 elif opts.get(b'rename'):
4039 current = _getcurrent()
4040 current = _getcurrent()
4040 if name == current:
4041 if name == current:
4041 raise error.Abort(
4042 raise error.Abort(
4042 _(b'can\'t rename "%s" to its current name') % name
4043 _(b'can\'t rename "%s" to its current name') % name
4043 )
4044 )
4044 if name in existing:
4045 if name in existing:
4045 raise error.Abort(_(b'queue "%s" already exists') % name)
4046 raise error.Abort(_(b'queue "%s" already exists') % name)
4046
4047
4047 olddir = _queuedir(current)
4048 olddir = _queuedir(current)
4048 newdir = _queuedir(name)
4049 newdir = _queuedir(name)
4049
4050
4050 if os.path.exists(newdir):
4051 if os.path.exists(newdir):
4051 raise error.Abort(
4052 raise error.Abort(
4052 _(b'non-queue directory "%s" already exists') % newdir
4053 _(b'non-queue directory "%s" already exists') % newdir
4053 )
4054 )
4054
4055
4055 fh = repo.vfs(b'patches.queues.new', b'w')
4056 fh = repo.vfs(b'patches.queues.new', b'w')
4056 for queue in existing:
4057 for queue in existing:
4057 if queue == current:
4058 if queue == current:
4058 fh.write(b'%s\n' % (name,))
4059 fh.write(b'%s\n' % (name,))
4059 if os.path.exists(olddir):
4060 if os.path.exists(olddir):
4060 util.rename(olddir, newdir)
4061 util.rename(olddir, newdir)
4061 else:
4062 else:
4062 fh.write(b'%s\n' % (queue,))
4063 fh.write(b'%s\n' % (queue,))
4063 fh.close()
4064 fh.close()
4064 repo.vfs.rename(b'patches.queues.new', _allqueues)
4065 repo.vfs.rename(b'patches.queues.new', _allqueues)
4065 _setactivenocheck(name)
4066 _setactivenocheck(name)
4066 elif opts.get(b'delete'):
4067 elif opts.get(b'delete'):
4067 _delete(name)
4068 _delete(name)
4068 elif opts.get(b'purge'):
4069 elif opts.get(b'purge'):
4069 if name in existing:
4070 if name in existing:
4070 _delete(name)
4071 _delete(name)
4071 qdir = _queuedir(name)
4072 qdir = _queuedir(name)
4072 if os.path.exists(qdir):
4073 if os.path.exists(qdir):
4073 shutil.rmtree(qdir)
4074 shutil.rmtree(qdir)
4074 else:
4075 else:
4075 if name not in existing:
4076 if name not in existing:
4076 raise error.Abort(_(b'use --create to create a new queue'))
4077 raise error.Abort(_(b'use --create to create a new queue'))
4077 _setactive(name)
4078 _setactive(name)
4078
4079
4079
4080
4080 def mqphasedefaults(repo, roots):
4081 def mqphasedefaults(repo, roots):
4081 """callback used to set mq changeset as secret when no phase data exists"""
4082 """callback used to set mq changeset as secret when no phase data exists"""
4082 if repo.mq.applied:
4083 if repo.mq.applied:
4083 if repo.ui.configbool(b'mq', b'secret'):
4084 if repo.ui.configbool(b'mq', b'secret'):
4084 mqphase = phases.secret
4085 mqphase = phases.secret
4085 else:
4086 else:
4086 mqphase = phases.draft
4087 mqphase = phases.draft
4087 qbase = repo[repo.mq.applied[0].node]
4088 qbase = repo[repo.mq.applied[0].node]
4088 roots[mqphase].add(qbase.node())
4089 roots[mqphase].add(qbase.node())
4089 return roots
4090 return roots
4090
4091
4091
4092
4092 def reposetup(ui, repo):
4093 def reposetup(ui, repo):
4093 class mqrepo(repo.__class__):
4094 class mqrepo(repo.__class__):
4094 @localrepo.unfilteredpropertycache
4095 @localrepo.unfilteredpropertycache
4095 def mq(self):
4096 def mq(self):
4096 return queue(self.ui, self.baseui, self.path)
4097 return queue(self.ui, self.baseui, self.path)
4097
4098
4098 def invalidateall(self):
4099 def invalidateall(self):
4099 super(mqrepo, self).invalidateall()
4100 super(mqrepo, self).invalidateall()
4100 if localrepo.hasunfilteredcache(self, 'mq'):
4101 if localrepo.hasunfilteredcache(self, 'mq'):
4101 # recreate mq in case queue path was changed
4102 # recreate mq in case queue path was changed
4102 delattr(self.unfiltered(), 'mq')
4103 delattr(self.unfiltered(), 'mq')
4103
4104
4104 def abortifwdirpatched(self, errmsg, force=False):
4105 def abortifwdirpatched(self, errmsg, force=False):
4105 if self.mq.applied and self.mq.checkapplied and not force:
4106 if self.mq.applied and self.mq.checkapplied and not force:
4106 parents = self.dirstate.parents()
4107 parents = self.dirstate.parents()
4107 patches = [s.node for s in self.mq.applied]
4108 patches = [s.node for s in self.mq.applied]
4108 if any(p in patches for p in parents):
4109 if any(p in patches for p in parents):
4109 raise error.Abort(errmsg)
4110 raise error.Abort(errmsg)
4110
4111
4111 def commit(
4112 def commit(
4112 self,
4113 self,
4113 text=b"",
4114 text=b"",
4114 user=None,
4115 user=None,
4115 date=None,
4116 date=None,
4116 match=None,
4117 match=None,
4117 force=False,
4118 force=False,
4118 editor=False,
4119 editor=False,
4119 extra=None,
4120 extra=None,
4120 ):
4121 ):
4121 if extra is None:
4122 if extra is None:
4122 extra = {}
4123 extra = {}
4123 self.abortifwdirpatched(
4124 self.abortifwdirpatched(
4124 _(b'cannot commit over an applied mq patch'), force
4125 _(b'cannot commit over an applied mq patch'), force
4125 )
4126 )
4126
4127
4127 return super(mqrepo, self).commit(
4128 return super(mqrepo, self).commit(
4128 text, user, date, match, force, editor, extra
4129 text, user, date, match, force, editor, extra
4129 )
4130 )
4130
4131
4131 def checkpush(self, pushop):
4132 def checkpush(self, pushop):
4132 if self.mq.applied and self.mq.checkapplied and not pushop.force:
4133 if self.mq.applied and self.mq.checkapplied and not pushop.force:
4133 outapplied = [e.node for e in self.mq.applied]
4134 outapplied = [e.node for e in self.mq.applied]
4134 if pushop.revs:
4135 if pushop.revs:
4135 # Assume applied patches have no non-patch descendants and
4136 # Assume applied patches have no non-patch descendants and
4136 # are not on remote already. Filtering any changeset not
4137 # are not on remote already. Filtering any changeset not
4137 # pushed.
4138 # pushed.
4138 heads = set(pushop.revs)
4139 heads = set(pushop.revs)
4139 for node in reversed(outapplied):
4140 for node in reversed(outapplied):
4140 if node in heads:
4141 if node in heads:
4141 break
4142 break
4142 else:
4143 else:
4143 outapplied.pop()
4144 outapplied.pop()
4144 # looking for pushed and shared changeset
4145 # looking for pushed and shared changeset
4145 for node in outapplied:
4146 for node in outapplied:
4146 if self[node].phase() < phases.secret:
4147 if self[node].phase() < phases.secret:
4147 raise error.Abort(_(b'source has mq patches applied'))
4148 raise error.Abort(_(b'source has mq patches applied'))
4148 # no non-secret patches pushed
4149 # no non-secret patches pushed
4149 super(mqrepo, self).checkpush(pushop)
4150 super(mqrepo, self).checkpush(pushop)
4150
4151
4151 def _findtags(self):
4152 def _findtags(self):
4152 '''augment tags from base class with patch tags'''
4153 '''augment tags from base class with patch tags'''
4153 result = super(mqrepo, self)._findtags()
4154 result = super(mqrepo, self)._findtags()
4154
4155
4155 q = self.mq
4156 q = self.mq
4156 if not q.applied:
4157 if not q.applied:
4157 return result
4158 return result
4158
4159
4159 mqtags = [(patch.node, patch.name) for patch in q.applied]
4160 mqtags = [(patch.node, patch.name) for patch in q.applied]
4160
4161
4161 try:
4162 try:
4162 # for now ignore filtering business
4163 # for now ignore filtering business
4163 self.unfiltered().changelog.rev(mqtags[-1][0])
4164 self.unfiltered().changelog.rev(mqtags[-1][0])
4164 except error.LookupError:
4165 except error.LookupError:
4165 self.ui.warn(
4166 self.ui.warn(
4166 _(b'mq status file refers to unknown node %s\n')
4167 _(b'mq status file refers to unknown node %s\n')
4167 % short(mqtags[-1][0])
4168 % short(mqtags[-1][0])
4168 )
4169 )
4169 return result
4170 return result
4170
4171
4171 # do not add fake tags for filtered revisions
4172 # do not add fake tags for filtered revisions
4172 included = self.changelog.hasnode
4173 included = self.changelog.hasnode
4173 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
4174 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
4174 if not mqtags:
4175 if not mqtags:
4175 return result
4176 return result
4176
4177
4177 mqtags.append((mqtags[-1][0], b'qtip'))
4178 mqtags.append((mqtags[-1][0], b'qtip'))
4178 mqtags.append((mqtags[0][0], b'qbase'))
4179 mqtags.append((mqtags[0][0], b'qbase'))
4179 mqtags.append((self.changelog.parents(mqtags[0][0])[0], b'qparent'))
4180 mqtags.append((self.changelog.parents(mqtags[0][0])[0], b'qparent'))
4180 tags = result[0]
4181 tags = result[0]
4181 for patch in mqtags:
4182 for patch in mqtags:
4182 if patch[1] in tags:
4183 if patch[1] in tags:
4183 self.ui.warn(
4184 self.ui.warn(
4184 _(b'tag %s overrides mq patch of the same name\n')
4185 _(b'tag %s overrides mq patch of the same name\n')
4185 % patch[1]
4186 % patch[1]
4186 )
4187 )
4187 else:
4188 else:
4188 tags[patch[1]] = patch[0]
4189 tags[patch[1]] = patch[0]
4189
4190
4190 return result
4191 return result
4191
4192
4192 if repo.local():
4193 if repo.local():
4193 repo.__class__ = mqrepo
4194 repo.__class__ = mqrepo
4194
4195
4195 repo._phasedefaults.append(mqphasedefaults)
4196 repo._phasedefaults.append(mqphasedefaults)
4196
4197
4197
4198
4198 def mqimport(orig, ui, repo, *args, **kwargs):
4199 def mqimport(orig, ui, repo, *args, **kwargs):
4199 if util.safehasattr(repo, b'abortifwdirpatched') and not kwargs.get(
4200 if util.safehasattr(repo, b'abortifwdirpatched') and not kwargs.get(
4200 'no_commit', False
4201 'no_commit', False
4201 ):
4202 ):
4202 repo.abortifwdirpatched(
4203 repo.abortifwdirpatched(
4203 _(b'cannot import over an applied patch'), kwargs.get('force')
4204 _(b'cannot import over an applied patch'), kwargs.get('force')
4204 )
4205 )
4205 return orig(ui, repo, *args, **kwargs)
4206 return orig(ui, repo, *args, **kwargs)
4206
4207
4207
4208
4208 def mqinit(orig, ui, *args, **kwargs):
4209 def mqinit(orig, ui, *args, **kwargs):
4209 mq = kwargs.pop('mq', None)
4210 mq = kwargs.pop('mq', None)
4210
4211
4211 if not mq:
4212 if not mq:
4212 return orig(ui, *args, **kwargs)
4213 return orig(ui, *args, **kwargs)
4213
4214
4214 if args:
4215 if args:
4215 repopath = args[0]
4216 repopath = args[0]
4216 if not hg.islocal(repopath):
4217 if not hg.islocal(repopath):
4217 raise error.Abort(
4218 raise error.Abort(
4218 _(b'only a local queue repository may be initialized')
4219 _(b'only a local queue repository may be initialized')
4219 )
4220 )
4220 else:
4221 else:
4221 repopath = cmdutil.findrepo(encoding.getcwd())
4222 repopath = cmdutil.findrepo(encoding.getcwd())
4222 if not repopath:
4223 if not repopath:
4223 raise error.Abort(
4224 raise error.Abort(
4224 _(b'there is no Mercurial repository here (.hg not found)')
4225 _(b'there is no Mercurial repository here (.hg not found)')
4225 )
4226 )
4226 repo = hg.repository(ui, repopath)
4227 repo = hg.repository(ui, repopath)
4227 return qinit(ui, repo, True)
4228 return qinit(ui, repo, True)
4228
4229
4229
4230
4230 def mqcommand(orig, ui, repo, *args, **kwargs):
4231 def mqcommand(orig, ui, repo, *args, **kwargs):
4231 """Add --mq option to operate on patch repository instead of main"""
4232 """Add --mq option to operate on patch repository instead of main"""
4232
4233
4233 # some commands do not like getting unknown options
4234 # some commands do not like getting unknown options
4234 mq = kwargs.pop('mq', None)
4235 mq = kwargs.pop('mq', None)
4235
4236
4236 if not mq:
4237 if not mq:
4237 return orig(ui, repo, *args, **kwargs)
4238 return orig(ui, repo, *args, **kwargs)
4238
4239
4239 q = repo.mq
4240 q = repo.mq
4240 r = q.qrepo()
4241 r = q.qrepo()
4241 if not r:
4242 if not r:
4242 raise error.Abort(_(b'no queue repository'))
4243 raise error.Abort(_(b'no queue repository'))
4243 return orig(r.ui, r, *args, **kwargs)
4244 return orig(r.ui, r, *args, **kwargs)
4244
4245
4245
4246
4246 def summaryhook(ui, repo):
4247 def summaryhook(ui, repo):
4247 q = repo.mq
4248 q = repo.mq
4248 m = []
4249 m = []
4249 a, u = len(q.applied), len(q.unapplied(repo))
4250 a, u = len(q.applied), len(q.unapplied(repo))
4250 if a:
4251 if a:
4251 m.append(ui.label(_(b"%d applied"), b'qseries.applied') % a)
4252 m.append(ui.label(_(b"%d applied"), b'qseries.applied') % a)
4252 if u:
4253 if u:
4253 m.append(ui.label(_(b"%d unapplied"), b'qseries.unapplied') % u)
4254 m.append(ui.label(_(b"%d unapplied"), b'qseries.unapplied') % u)
4254 if m:
4255 if m:
4255 # i18n: column positioning for "hg summary"
4256 # i18n: column positioning for "hg summary"
4256 ui.write(_(b"mq: %s\n") % b', '.join(m))
4257 ui.write(_(b"mq: %s\n") % b', '.join(m))
4257 else:
4258 else:
4258 # i18n: column positioning for "hg summary"
4259 # i18n: column positioning for "hg summary"
4259 ui.note(_(b"mq: (empty queue)\n"))
4260 ui.note(_(b"mq: (empty queue)\n"))
4260
4261
4261
4262
4262 revsetpredicate = registrar.revsetpredicate()
4263 revsetpredicate = registrar.revsetpredicate()
4263
4264
4264
4265
4265 @revsetpredicate(b'mq()')
4266 @revsetpredicate(b'mq()')
4266 def revsetmq(repo, subset, x):
4267 def revsetmq(repo, subset, x):
4267 """Changesets managed by MQ."""
4268 """Changesets managed by MQ."""
4268 revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
4269 revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
4269 applied = {repo[r.node].rev() for r in repo.mq.applied}
4270 applied = {repo[r.node].rev() for r in repo.mq.applied}
4270 return smartset.baseset([r for r in subset if r in applied])
4271 return smartset.baseset([r for r in subset if r in applied])
4271
4272
4272
4273
4273 # tell hggettext to extract docstrings from these functions:
4274 # tell hggettext to extract docstrings from these functions:
4274 i18nfunctions = [revsetmq]
4275 i18nfunctions = [revsetmq]
4275
4276
4276
4277
4277 def extsetup(ui):
4278 def extsetup(ui):
4278 # Ensure mq wrappers are called first, regardless of extension load order by
4279 # Ensure mq wrappers are called first, regardless of extension load order by
4279 # NOT wrapping in uisetup() and instead deferring to init stage two here.
4280 # NOT wrapping in uisetup() and instead deferring to init stage two here.
4280 mqopt = [(b'', b'mq', None, _(b"operate on patch repository"))]
4281 mqopt = [(b'', b'mq', None, _(b"operate on patch repository"))]
4281
4282
4282 extensions.wrapcommand(commands.table, b'import', mqimport)
4283 extensions.wrapcommand(commands.table, b'import', mqimport)
4283 cmdutil.summaryhooks.add(b'mq', summaryhook)
4284 cmdutil.summaryhooks.add(b'mq', summaryhook)
4284
4285
4285 entry = extensions.wrapcommand(commands.table, b'init', mqinit)
4286 entry = extensions.wrapcommand(commands.table, b'init', mqinit)
4286 entry[1].extend(mqopt)
4287 entry[1].extend(mqopt)
4287
4288
4288 def dotable(cmdtable):
4289 def dotable(cmdtable):
4289 for cmd, entry in pycompat.iteritems(cmdtable):
4290 for cmd, entry in pycompat.iteritems(cmdtable):
4290 cmd = cmdutil.parsealiases(cmd)[0]
4291 cmd = cmdutil.parsealiases(cmd)[0]
4291 func = entry[0]
4292 func = entry[0]
4292 if func.norepo:
4293 if func.norepo:
4293 continue
4294 continue
4294 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
4295 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
4295 entry[1].extend(mqopt)
4296 entry[1].extend(mqopt)
4296
4297
4297 dotable(commands.table)
4298 dotable(commands.table)
4298
4299
4299 thismodule = sys.modules["hgext.mq"]
4300 thismodule = sys.modules["hgext.mq"]
4300 for extname, extmodule in extensions.extensions():
4301 for extname, extmodule in extensions.extensions():
4301 if extmodule != thismodule:
4302 if extmodule != thismodule:
4302 dotable(getattr(extmodule, 'cmdtable', {}))
4303 dotable(getattr(extmodule, 'cmdtable', {}))
4303
4304
4304
4305
4305 colortable = {
4306 colortable = {
4306 b'qguard.negative': b'red',
4307 b'qguard.negative': b'red',
4307 b'qguard.positive': b'yellow',
4308 b'qguard.positive': b'yellow',
4308 b'qguard.unguarded': b'green',
4309 b'qguard.unguarded': b'green',
4309 b'qseries.applied': b'blue bold underline',
4310 b'qseries.applied': b'blue bold underline',
4310 b'qseries.guarded': b'black bold',
4311 b'qseries.guarded': b'black bold',
4311 b'qseries.missing': b'red bold',
4312 b'qseries.missing': b'red bold',
4312 b'qseries.unapplied': b'black bold',
4313 b'qseries.unapplied': b'black bold',
4313 }
4314 }
@@ -1,677 +1,680 b''
1 # narrowcommands.py - command modifications for narrowhg extension
1 # narrowcommands.py - command modifications for narrowhg extension
2 #
2 #
3 # Copyright 2017 Google, Inc.
3 # Copyright 2017 Google, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import itertools
9 import itertools
10 import os
10 import os
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.node import (
13 from mercurial.node import (
14 hex,
14 hex,
15 nullid,
15 nullid,
16 short,
16 short,
17 )
17 )
18 from mercurial import (
18 from mercurial import (
19 bundle2,
19 bundle2,
20 cmdutil,
20 cmdutil,
21 commands,
21 commands,
22 discovery,
22 discovery,
23 encoding,
23 encoding,
24 error,
24 error,
25 exchange,
25 exchange,
26 extensions,
26 extensions,
27 hg,
27 hg,
28 narrowspec,
28 narrowspec,
29 pathutil,
29 pathutil,
30 pycompat,
30 pycompat,
31 registrar,
31 registrar,
32 repair,
32 repair,
33 repoview,
33 repoview,
34 requirements,
34 requirements,
35 sparse,
35 sparse,
36 util,
36 util,
37 wireprototypes,
37 wireprototypes,
38 )
38 )
39 from mercurial.utils import (
40 urlutil,
41 )
39
42
40 table = {}
43 table = {}
41 command = registrar.command(table)
44 command = registrar.command(table)
42
45
43
46
44 def setup():
47 def setup():
45 """Wraps user-facing mercurial commands with narrow-aware versions."""
48 """Wraps user-facing mercurial commands with narrow-aware versions."""
46
49
47 entry = extensions.wrapcommand(commands.table, b'clone', clonenarrowcmd)
50 entry = extensions.wrapcommand(commands.table, b'clone', clonenarrowcmd)
48 entry[1].append(
51 entry[1].append(
49 (b'', b'narrow', None, _(b"create a narrow clone of select files"))
52 (b'', b'narrow', None, _(b"create a narrow clone of select files"))
50 )
53 )
51 entry[1].append(
54 entry[1].append(
52 (
55 (
53 b'',
56 b'',
54 b'depth',
57 b'depth',
55 b'',
58 b'',
56 _(b"limit the history fetched by distance from heads"),
59 _(b"limit the history fetched by distance from heads"),
57 )
60 )
58 )
61 )
59 entry[1].append((b'', b'narrowspec', b'', _(b"read narrowspecs from file")))
62 entry[1].append((b'', b'narrowspec', b'', _(b"read narrowspecs from file")))
60 # TODO(durin42): unify sparse/narrow --include/--exclude logic a bit
63 # TODO(durin42): unify sparse/narrow --include/--exclude logic a bit
61 if b'sparse' not in extensions.enabled():
64 if b'sparse' not in extensions.enabled():
62 entry[1].append(
65 entry[1].append(
63 (b'', b'include', [], _(b"specifically fetch this file/directory"))
66 (b'', b'include', [], _(b"specifically fetch this file/directory"))
64 )
67 )
65 entry[1].append(
68 entry[1].append(
66 (
69 (
67 b'',
70 b'',
68 b'exclude',
71 b'exclude',
69 [],
72 [],
70 _(b"do not fetch this file/directory, even if included"),
73 _(b"do not fetch this file/directory, even if included"),
71 )
74 )
72 )
75 )
73
76
74 entry = extensions.wrapcommand(commands.table, b'pull', pullnarrowcmd)
77 entry = extensions.wrapcommand(commands.table, b'pull', pullnarrowcmd)
75 entry[1].append(
78 entry[1].append(
76 (
79 (
77 b'',
80 b'',
78 b'depth',
81 b'depth',
79 b'',
82 b'',
80 _(b"limit the history fetched by distance from heads"),
83 _(b"limit the history fetched by distance from heads"),
81 )
84 )
82 )
85 )
83
86
84 extensions.wrapcommand(commands.table, b'archive', archivenarrowcmd)
87 extensions.wrapcommand(commands.table, b'archive', archivenarrowcmd)
85
88
86
89
87 def clonenarrowcmd(orig, ui, repo, *args, **opts):
90 def clonenarrowcmd(orig, ui, repo, *args, **opts):
88 """Wraps clone command, so 'hg clone' first wraps localrepo.clone()."""
91 """Wraps clone command, so 'hg clone' first wraps localrepo.clone()."""
89 opts = pycompat.byteskwargs(opts)
92 opts = pycompat.byteskwargs(opts)
90 wrappedextraprepare = util.nullcontextmanager()
93 wrappedextraprepare = util.nullcontextmanager()
91 narrowspecfile = opts[b'narrowspec']
94 narrowspecfile = opts[b'narrowspec']
92
95
93 if narrowspecfile:
96 if narrowspecfile:
94 filepath = os.path.join(encoding.getcwd(), narrowspecfile)
97 filepath = os.path.join(encoding.getcwd(), narrowspecfile)
95 ui.status(_(b"reading narrowspec from '%s'\n") % filepath)
98 ui.status(_(b"reading narrowspec from '%s'\n") % filepath)
96 try:
99 try:
97 fdata = util.readfile(filepath)
100 fdata = util.readfile(filepath)
98 except IOError as inst:
101 except IOError as inst:
99 raise error.Abort(
102 raise error.Abort(
100 _(b"cannot read narrowspecs from '%s': %s")
103 _(b"cannot read narrowspecs from '%s': %s")
101 % (filepath, encoding.strtolocal(inst.strerror))
104 % (filepath, encoding.strtolocal(inst.strerror))
102 )
105 )
103
106
104 includes, excludes, profiles = sparse.parseconfig(ui, fdata, b'narrow')
107 includes, excludes, profiles = sparse.parseconfig(ui, fdata, b'narrow')
105 if profiles:
108 if profiles:
106 raise error.ConfigError(
109 raise error.ConfigError(
107 _(
110 _(
108 b"cannot specify other files using '%include' in"
111 b"cannot specify other files using '%include' in"
109 b" narrowspec"
112 b" narrowspec"
110 )
113 )
111 )
114 )
112
115
113 narrowspec.validatepatterns(includes)
116 narrowspec.validatepatterns(includes)
114 narrowspec.validatepatterns(excludes)
117 narrowspec.validatepatterns(excludes)
115
118
116 # narrowspec is passed so we should assume that user wants narrow clone
119 # narrowspec is passed so we should assume that user wants narrow clone
117 opts[b'narrow'] = True
120 opts[b'narrow'] = True
118 opts[b'include'].extend(includes)
121 opts[b'include'].extend(includes)
119 opts[b'exclude'].extend(excludes)
122 opts[b'exclude'].extend(excludes)
120
123
121 if opts[b'narrow']:
124 if opts[b'narrow']:
122
125
123 def pullbundle2extraprepare_widen(orig, pullop, kwargs):
126 def pullbundle2extraprepare_widen(orig, pullop, kwargs):
124 orig(pullop, kwargs)
127 orig(pullop, kwargs)
125
128
126 if opts.get(b'depth'):
129 if opts.get(b'depth'):
127 kwargs[b'depth'] = opts[b'depth']
130 kwargs[b'depth'] = opts[b'depth']
128
131
129 wrappedextraprepare = extensions.wrappedfunction(
132 wrappedextraprepare = extensions.wrappedfunction(
130 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
133 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
131 )
134 )
132
135
133 with wrappedextraprepare:
136 with wrappedextraprepare:
134 return orig(ui, repo, *args, **pycompat.strkwargs(opts))
137 return orig(ui, repo, *args, **pycompat.strkwargs(opts))
135
138
136
139
137 def pullnarrowcmd(orig, ui, repo, *args, **opts):
140 def pullnarrowcmd(orig, ui, repo, *args, **opts):
138 """Wraps pull command to allow modifying narrow spec."""
141 """Wraps pull command to allow modifying narrow spec."""
139 wrappedextraprepare = util.nullcontextmanager()
142 wrappedextraprepare = util.nullcontextmanager()
140 if requirements.NARROW_REQUIREMENT in repo.requirements:
143 if requirements.NARROW_REQUIREMENT in repo.requirements:
141
144
142 def pullbundle2extraprepare_widen(orig, pullop, kwargs):
145 def pullbundle2extraprepare_widen(orig, pullop, kwargs):
143 orig(pullop, kwargs)
146 orig(pullop, kwargs)
144 if opts.get('depth'):
147 if opts.get('depth'):
145 kwargs[b'depth'] = opts['depth']
148 kwargs[b'depth'] = opts['depth']
146
149
147 wrappedextraprepare = extensions.wrappedfunction(
150 wrappedextraprepare = extensions.wrappedfunction(
148 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
151 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
149 )
152 )
150
153
151 with wrappedextraprepare:
154 with wrappedextraprepare:
152 return orig(ui, repo, *args, **opts)
155 return orig(ui, repo, *args, **opts)
153
156
154
157
155 def archivenarrowcmd(orig, ui, repo, *args, **opts):
158 def archivenarrowcmd(orig, ui, repo, *args, **opts):
156 """Wraps archive command to narrow the default includes."""
159 """Wraps archive command to narrow the default includes."""
157 if requirements.NARROW_REQUIREMENT in repo.requirements:
160 if requirements.NARROW_REQUIREMENT in repo.requirements:
158 repo_includes, repo_excludes = repo.narrowpats
161 repo_includes, repo_excludes = repo.narrowpats
159 includes = set(opts.get('include', []))
162 includes = set(opts.get('include', []))
160 excludes = set(opts.get('exclude', []))
163 excludes = set(opts.get('exclude', []))
161 includes, excludes, unused_invalid = narrowspec.restrictpatterns(
164 includes, excludes, unused_invalid = narrowspec.restrictpatterns(
162 includes, excludes, repo_includes, repo_excludes
165 includes, excludes, repo_includes, repo_excludes
163 )
166 )
164 if includes:
167 if includes:
165 opts['include'] = includes
168 opts['include'] = includes
166 if excludes:
169 if excludes:
167 opts['exclude'] = excludes
170 opts['exclude'] = excludes
168 return orig(ui, repo, *args, **opts)
171 return orig(ui, repo, *args, **opts)
169
172
170
173
171 def pullbundle2extraprepare(orig, pullop, kwargs):
174 def pullbundle2extraprepare(orig, pullop, kwargs):
172 repo = pullop.repo
175 repo = pullop.repo
173 if requirements.NARROW_REQUIREMENT not in repo.requirements:
176 if requirements.NARROW_REQUIREMENT not in repo.requirements:
174 return orig(pullop, kwargs)
177 return orig(pullop, kwargs)
175
178
176 if wireprototypes.NARROWCAP not in pullop.remote.capabilities():
179 if wireprototypes.NARROWCAP not in pullop.remote.capabilities():
177 raise error.Abort(_(b"server does not support narrow clones"))
180 raise error.Abort(_(b"server does not support narrow clones"))
178 orig(pullop, kwargs)
181 orig(pullop, kwargs)
179 kwargs[b'narrow'] = True
182 kwargs[b'narrow'] = True
180 include, exclude = repo.narrowpats
183 include, exclude = repo.narrowpats
181 kwargs[b'oldincludepats'] = include
184 kwargs[b'oldincludepats'] = include
182 kwargs[b'oldexcludepats'] = exclude
185 kwargs[b'oldexcludepats'] = exclude
183 if include:
186 if include:
184 kwargs[b'includepats'] = include
187 kwargs[b'includepats'] = include
185 if exclude:
188 if exclude:
186 kwargs[b'excludepats'] = exclude
189 kwargs[b'excludepats'] = exclude
187 # calculate known nodes only in ellipses cases because in non-ellipses cases
190 # calculate known nodes only in ellipses cases because in non-ellipses cases
188 # we have all the nodes
191 # we have all the nodes
189 if wireprototypes.ELLIPSESCAP1 in pullop.remote.capabilities():
192 if wireprototypes.ELLIPSESCAP1 in pullop.remote.capabilities():
190 kwargs[b'known'] = [
193 kwargs[b'known'] = [
191 hex(ctx.node())
194 hex(ctx.node())
192 for ctx in repo.set(b'::%ln', pullop.common)
195 for ctx in repo.set(b'::%ln', pullop.common)
193 if ctx.node() != nullid
196 if ctx.node() != nullid
194 ]
197 ]
195 if not kwargs[b'known']:
198 if not kwargs[b'known']:
196 # Mercurial serializes an empty list as '' and deserializes it as
199 # Mercurial serializes an empty list as '' and deserializes it as
197 # [''], so delete it instead to avoid handling the empty string on
200 # [''], so delete it instead to avoid handling the empty string on
198 # the server.
201 # the server.
199 del kwargs[b'known']
202 del kwargs[b'known']
200
203
201
204
202 extensions.wrapfunction(
205 extensions.wrapfunction(
203 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare
206 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare
204 )
207 )
205
208
206
209
207 def _narrow(
210 def _narrow(
208 ui,
211 ui,
209 repo,
212 repo,
210 remote,
213 remote,
211 commoninc,
214 commoninc,
212 oldincludes,
215 oldincludes,
213 oldexcludes,
216 oldexcludes,
214 newincludes,
217 newincludes,
215 newexcludes,
218 newexcludes,
216 force,
219 force,
217 backup,
220 backup,
218 ):
221 ):
219 oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
222 oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
220 newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
223 newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
221
224
222 # This is essentially doing "hg outgoing" to find all local-only
225 # This is essentially doing "hg outgoing" to find all local-only
223 # commits. We will then check that the local-only commits don't
226 # commits. We will then check that the local-only commits don't
224 # have any changes to files that will be untracked.
227 # have any changes to files that will be untracked.
225 unfi = repo.unfiltered()
228 unfi = repo.unfiltered()
226 outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc)
229 outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc)
227 ui.status(_(b'looking for local changes to affected paths\n'))
230 ui.status(_(b'looking for local changes to affected paths\n'))
228 localnodes = []
231 localnodes = []
229 for n in itertools.chain(outgoing.missing, outgoing.excluded):
232 for n in itertools.chain(outgoing.missing, outgoing.excluded):
230 if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()):
233 if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()):
231 localnodes.append(n)
234 localnodes.append(n)
232 revstostrip = unfi.revs(b'descendants(%ln)', localnodes)
235 revstostrip = unfi.revs(b'descendants(%ln)', localnodes)
233 hiddenrevs = repoview.filterrevs(repo, b'visible')
236 hiddenrevs = repoview.filterrevs(repo, b'visible')
234 visibletostrip = list(
237 visibletostrip = list(
235 repo.changelog.node(r) for r in (revstostrip - hiddenrevs)
238 repo.changelog.node(r) for r in (revstostrip - hiddenrevs)
236 )
239 )
237 if visibletostrip:
240 if visibletostrip:
238 ui.status(
241 ui.status(
239 _(
242 _(
240 b'The following changeset(s) or their ancestors have '
243 b'The following changeset(s) or their ancestors have '
241 b'local changes not on the remote:\n'
244 b'local changes not on the remote:\n'
242 )
245 )
243 )
246 )
244 maxnodes = 10
247 maxnodes = 10
245 if ui.verbose or len(visibletostrip) <= maxnodes:
248 if ui.verbose or len(visibletostrip) <= maxnodes:
246 for n in visibletostrip:
249 for n in visibletostrip:
247 ui.status(b'%s\n' % short(n))
250 ui.status(b'%s\n' % short(n))
248 else:
251 else:
249 for n in visibletostrip[:maxnodes]:
252 for n in visibletostrip[:maxnodes]:
250 ui.status(b'%s\n' % short(n))
253 ui.status(b'%s\n' % short(n))
251 ui.status(
254 ui.status(
252 _(b'...and %d more, use --verbose to list all\n')
255 _(b'...and %d more, use --verbose to list all\n')
253 % (len(visibletostrip) - maxnodes)
256 % (len(visibletostrip) - maxnodes)
254 )
257 )
255 if not force:
258 if not force:
256 raise error.StateError(
259 raise error.StateError(
257 _(b'local changes found'),
260 _(b'local changes found'),
258 hint=_(b'use --force-delete-local-changes to ignore'),
261 hint=_(b'use --force-delete-local-changes to ignore'),
259 )
262 )
260
263
261 with ui.uninterruptible():
264 with ui.uninterruptible():
262 if revstostrip:
265 if revstostrip:
263 tostrip = [unfi.changelog.node(r) for r in revstostrip]
266 tostrip = [unfi.changelog.node(r) for r in revstostrip]
264 if repo[b'.'].node() in tostrip:
267 if repo[b'.'].node() in tostrip:
265 # stripping working copy, so move to a different commit first
268 # stripping working copy, so move to a different commit first
266 urev = max(
269 urev = max(
267 repo.revs(
270 repo.revs(
268 b'(::%n) - %ln + null',
271 b'(::%n) - %ln + null',
269 repo[b'.'].node(),
272 repo[b'.'].node(),
270 visibletostrip,
273 visibletostrip,
271 )
274 )
272 )
275 )
273 hg.clean(repo, urev)
276 hg.clean(repo, urev)
274 overrides = {(b'devel', b'strip-obsmarkers'): False}
277 overrides = {(b'devel', b'strip-obsmarkers'): False}
275 with ui.configoverride(overrides, b'narrow'):
278 with ui.configoverride(overrides, b'narrow'):
276 repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup)
279 repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup)
277
280
278 todelete = []
281 todelete = []
279 for t, f, f2, size in repo.store.datafiles():
282 for t, f, f2, size in repo.store.datafiles():
280 if f.startswith(b'data/'):
283 if f.startswith(b'data/'):
281 file = f[5:-2]
284 file = f[5:-2]
282 if not newmatch(file):
285 if not newmatch(file):
283 todelete.append(f)
286 todelete.append(f)
284 elif f.startswith(b'meta/'):
287 elif f.startswith(b'meta/'):
285 dir = f[5:-13]
288 dir = f[5:-13]
286 dirs = sorted(pathutil.dirs({dir})) + [dir]
289 dirs = sorted(pathutil.dirs({dir})) + [dir]
287 include = True
290 include = True
288 for d in dirs:
291 for d in dirs:
289 visit = newmatch.visitdir(d)
292 visit = newmatch.visitdir(d)
290 if not visit:
293 if not visit:
291 include = False
294 include = False
292 break
295 break
293 if visit == b'all':
296 if visit == b'all':
294 break
297 break
295 if not include:
298 if not include:
296 todelete.append(f)
299 todelete.append(f)
297
300
298 repo.destroying()
301 repo.destroying()
299
302
300 with repo.transaction(b'narrowing'):
303 with repo.transaction(b'narrowing'):
301 # Update narrowspec before removing revlogs, so repo won't be
304 # Update narrowspec before removing revlogs, so repo won't be
302 # corrupt in case of crash
305 # corrupt in case of crash
303 repo.setnarrowpats(newincludes, newexcludes)
306 repo.setnarrowpats(newincludes, newexcludes)
304
307
305 for f in todelete:
308 for f in todelete:
306 ui.status(_(b'deleting %s\n') % f)
309 ui.status(_(b'deleting %s\n') % f)
307 util.unlinkpath(repo.svfs.join(f))
310 util.unlinkpath(repo.svfs.join(f))
308 repo.store.markremoved(f)
311 repo.store.markremoved(f)
309
312
310 narrowspec.updateworkingcopy(repo, assumeclean=True)
313 narrowspec.updateworkingcopy(repo, assumeclean=True)
311 narrowspec.copytoworkingcopy(repo)
314 narrowspec.copytoworkingcopy(repo)
312
315
313 repo.destroyed()
316 repo.destroyed()
314
317
315
318
316 def _widen(
319 def _widen(
317 ui,
320 ui,
318 repo,
321 repo,
319 remote,
322 remote,
320 commoninc,
323 commoninc,
321 oldincludes,
324 oldincludes,
322 oldexcludes,
325 oldexcludes,
323 newincludes,
326 newincludes,
324 newexcludes,
327 newexcludes,
325 ):
328 ):
326 # for now we assume that if a server has ellipses enabled, we will be
329 # for now we assume that if a server has ellipses enabled, we will be
327 # exchanging ellipses nodes. In future we should add ellipses as a client
330 # exchanging ellipses nodes. In future we should add ellipses as a client
328 # side requirement (maybe) to distinguish a client is shallow or not and
331 # side requirement (maybe) to distinguish a client is shallow or not and
329 # then send that information to server whether we want ellipses or not.
332 # then send that information to server whether we want ellipses or not.
330 # Theoretically a non-ellipses repo should be able to use narrow
333 # Theoretically a non-ellipses repo should be able to use narrow
331 # functionality from an ellipses enabled server
334 # functionality from an ellipses enabled server
332 remotecap = remote.capabilities()
335 remotecap = remote.capabilities()
333 ellipsesremote = any(
336 ellipsesremote = any(
334 cap in remotecap for cap in wireprototypes.SUPPORTED_ELLIPSESCAP
337 cap in remotecap for cap in wireprototypes.SUPPORTED_ELLIPSESCAP
335 )
338 )
336
339
337 # check whether we are talking to a server which supports old version of
340 # check whether we are talking to a server which supports old version of
338 # ellipses capabilities
341 # ellipses capabilities
339 isoldellipses = (
342 isoldellipses = (
340 ellipsesremote
343 ellipsesremote
341 and wireprototypes.ELLIPSESCAP1 in remotecap
344 and wireprototypes.ELLIPSESCAP1 in remotecap
342 and wireprototypes.ELLIPSESCAP not in remotecap
345 and wireprototypes.ELLIPSESCAP not in remotecap
343 )
346 )
344
347
345 def pullbundle2extraprepare_widen(orig, pullop, kwargs):
348 def pullbundle2extraprepare_widen(orig, pullop, kwargs):
346 orig(pullop, kwargs)
349 orig(pullop, kwargs)
347 # The old{in,ex}cludepats have already been set by orig()
350 # The old{in,ex}cludepats have already been set by orig()
348 kwargs[b'includepats'] = newincludes
351 kwargs[b'includepats'] = newincludes
349 kwargs[b'excludepats'] = newexcludes
352 kwargs[b'excludepats'] = newexcludes
350
353
351 wrappedextraprepare = extensions.wrappedfunction(
354 wrappedextraprepare = extensions.wrappedfunction(
352 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
355 exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
353 )
356 )
354
357
355 # define a function that narrowbundle2 can call after creating the
358 # define a function that narrowbundle2 can call after creating the
356 # backup bundle, but before applying the bundle from the server
359 # backup bundle, but before applying the bundle from the server
357 def setnewnarrowpats():
360 def setnewnarrowpats():
358 repo.setnarrowpats(newincludes, newexcludes)
361 repo.setnarrowpats(newincludes, newexcludes)
359
362
360 repo.setnewnarrowpats = setnewnarrowpats
363 repo.setnewnarrowpats = setnewnarrowpats
361 # silence the devel-warning of applying an empty changegroup
364 # silence the devel-warning of applying an empty changegroup
362 overrides = {(b'devel', b'all-warnings'): False}
365 overrides = {(b'devel', b'all-warnings'): False}
363
366
364 common = commoninc[0]
367 common = commoninc[0]
365 with ui.uninterruptible():
368 with ui.uninterruptible():
366 if ellipsesremote:
369 if ellipsesremote:
367 ds = repo.dirstate
370 ds = repo.dirstate
368 p1, p2 = ds.p1(), ds.p2()
371 p1, p2 = ds.p1(), ds.p2()
369 with ds.parentchange():
372 with ds.parentchange():
370 ds.setparents(nullid, nullid)
373 ds.setparents(nullid, nullid)
371 if isoldellipses:
374 if isoldellipses:
372 with wrappedextraprepare:
375 with wrappedextraprepare:
373 exchange.pull(repo, remote, heads=common)
376 exchange.pull(repo, remote, heads=common)
374 else:
377 else:
375 known = []
378 known = []
376 if ellipsesremote:
379 if ellipsesremote:
377 known = [
380 known = [
378 ctx.node()
381 ctx.node()
379 for ctx in repo.set(b'::%ln', common)
382 for ctx in repo.set(b'::%ln', common)
380 if ctx.node() != nullid
383 if ctx.node() != nullid
381 ]
384 ]
382 with remote.commandexecutor() as e:
385 with remote.commandexecutor() as e:
383 bundle = e.callcommand(
386 bundle = e.callcommand(
384 b'narrow_widen',
387 b'narrow_widen',
385 {
388 {
386 b'oldincludes': oldincludes,
389 b'oldincludes': oldincludes,
387 b'oldexcludes': oldexcludes,
390 b'oldexcludes': oldexcludes,
388 b'newincludes': newincludes,
391 b'newincludes': newincludes,
389 b'newexcludes': newexcludes,
392 b'newexcludes': newexcludes,
390 b'cgversion': b'03',
393 b'cgversion': b'03',
391 b'commonheads': common,
394 b'commonheads': common,
392 b'known': known,
395 b'known': known,
393 b'ellipses': ellipsesremote,
396 b'ellipses': ellipsesremote,
394 },
397 },
395 ).result()
398 ).result()
396
399
397 trmanager = exchange.transactionmanager(
400 trmanager = exchange.transactionmanager(
398 repo, b'widen', remote.url()
401 repo, b'widen', remote.url()
399 )
402 )
400 with trmanager, repo.ui.configoverride(overrides, b'widen'):
403 with trmanager, repo.ui.configoverride(overrides, b'widen'):
401 op = bundle2.bundleoperation(
404 op = bundle2.bundleoperation(
402 repo, trmanager.transaction, source=b'widen'
405 repo, trmanager.transaction, source=b'widen'
403 )
406 )
404 # TODO: we should catch error.Abort here
407 # TODO: we should catch error.Abort here
405 bundle2.processbundle(repo, bundle, op=op)
408 bundle2.processbundle(repo, bundle, op=op)
406
409
407 if ellipsesremote:
410 if ellipsesremote:
408 with ds.parentchange():
411 with ds.parentchange():
409 ds.setparents(p1, p2)
412 ds.setparents(p1, p2)
410
413
411 with repo.transaction(b'widening'):
414 with repo.transaction(b'widening'):
412 repo.setnewnarrowpats()
415 repo.setnewnarrowpats()
413 narrowspec.updateworkingcopy(repo)
416 narrowspec.updateworkingcopy(repo)
414 narrowspec.copytoworkingcopy(repo)
417 narrowspec.copytoworkingcopy(repo)
415
418
416
419
417 # TODO(rdamazio): Make new matcher format and update description
420 # TODO(rdamazio): Make new matcher format and update description
418 @command(
421 @command(
419 b'tracked',
422 b'tracked',
420 [
423 [
421 (b'', b'addinclude', [], _(b'new paths to include')),
424 (b'', b'addinclude', [], _(b'new paths to include')),
422 (b'', b'removeinclude', [], _(b'old paths to no longer include')),
425 (b'', b'removeinclude', [], _(b'old paths to no longer include')),
423 (
426 (
424 b'',
427 b'',
425 b'auto-remove-includes',
428 b'auto-remove-includes',
426 False,
429 False,
427 _(b'automatically choose unused includes to remove'),
430 _(b'automatically choose unused includes to remove'),
428 ),
431 ),
429 (b'', b'addexclude', [], _(b'new paths to exclude')),
432 (b'', b'addexclude', [], _(b'new paths to exclude')),
430 (b'', b'import-rules', b'', _(b'import narrowspecs from a file')),
433 (b'', b'import-rules', b'', _(b'import narrowspecs from a file')),
431 (b'', b'removeexclude', [], _(b'old paths to no longer exclude')),
434 (b'', b'removeexclude', [], _(b'old paths to no longer exclude')),
432 (
435 (
433 b'',
436 b'',
434 b'clear',
437 b'clear',
435 False,
438 False,
436 _(b'whether to replace the existing narrowspec'),
439 _(b'whether to replace the existing narrowspec'),
437 ),
440 ),
438 (
441 (
439 b'',
442 b'',
440 b'force-delete-local-changes',
443 b'force-delete-local-changes',
441 False,
444 False,
442 _(b'forces deletion of local changes when narrowing'),
445 _(b'forces deletion of local changes when narrowing'),
443 ),
446 ),
444 (
447 (
445 b'',
448 b'',
446 b'backup',
449 b'backup',
447 True,
450 True,
448 _(b'back up local changes when narrowing'),
451 _(b'back up local changes when narrowing'),
449 ),
452 ),
450 (
453 (
451 b'',
454 b'',
452 b'update-working-copy',
455 b'update-working-copy',
453 False,
456 False,
454 _(b'update working copy when the store has changed'),
457 _(b'update working copy when the store has changed'),
455 ),
458 ),
456 ]
459 ]
457 + commands.remoteopts,
460 + commands.remoteopts,
458 _(b'[OPTIONS]... [REMOTE]'),
461 _(b'[OPTIONS]... [REMOTE]'),
459 inferrepo=True,
462 inferrepo=True,
460 helpcategory=command.CATEGORY_MAINTENANCE,
463 helpcategory=command.CATEGORY_MAINTENANCE,
461 )
464 )
462 def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
465 def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
463 """show or change the current narrowspec
466 """show or change the current narrowspec
464
467
465 With no argument, shows the current narrowspec entries, one per line. Each
468 With no argument, shows the current narrowspec entries, one per line. Each
466 line will be prefixed with 'I' or 'X' for included or excluded patterns,
469 line will be prefixed with 'I' or 'X' for included or excluded patterns,
467 respectively.
470 respectively.
468
471
469 The narrowspec is comprised of expressions to match remote files and/or
472 The narrowspec is comprised of expressions to match remote files and/or
470 directories that should be pulled into your client.
473 directories that should be pulled into your client.
471 The narrowspec has *include* and *exclude* expressions, with excludes always
474 The narrowspec has *include* and *exclude* expressions, with excludes always
472 trumping includes: that is, if a file matches an exclude expression, it will
475 trumping includes: that is, if a file matches an exclude expression, it will
473 be excluded even if it also matches an include expression.
476 be excluded even if it also matches an include expression.
474 Excluding files that were never included has no effect.
477 Excluding files that were never included has no effect.
475
478
476 Each included or excluded entry is in the format described by
479 Each included or excluded entry is in the format described by
477 'hg help patterns'.
480 'hg help patterns'.
478
481
479 The options allow you to add or remove included and excluded expressions.
482 The options allow you to add or remove included and excluded expressions.
480
483
481 If --clear is specified, then all previous includes and excludes are DROPPED
484 If --clear is specified, then all previous includes and excludes are DROPPED
482 and replaced by the new ones specified to --addinclude and --addexclude.
485 and replaced by the new ones specified to --addinclude and --addexclude.
483 If --clear is specified without any further options, the narrowspec will be
486 If --clear is specified without any further options, the narrowspec will be
484 empty and will not match any files.
487 empty and will not match any files.
485
488
486 If --auto-remove-includes is specified, then those includes that don't match
489 If --auto-remove-includes is specified, then those includes that don't match
487 any files modified by currently visible local commits (those not shared by
490 any files modified by currently visible local commits (those not shared by
488 the remote) will be added to the set of explicitly specified includes to
491 the remote) will be added to the set of explicitly specified includes to
489 remove.
492 remove.
490
493
491 --import-rules accepts a path to a file containing rules, allowing you to
494 --import-rules accepts a path to a file containing rules, allowing you to
492 add --addinclude, --addexclude rules in bulk. Like the other include and
495 add --addinclude, --addexclude rules in bulk. Like the other include and
493 exclude switches, the changes are applied immediately.
496 exclude switches, the changes are applied immediately.
494 """
497 """
495 opts = pycompat.byteskwargs(opts)
498 opts = pycompat.byteskwargs(opts)
496 if requirements.NARROW_REQUIREMENT not in repo.requirements:
499 if requirements.NARROW_REQUIREMENT not in repo.requirements:
497 raise error.InputError(
500 raise error.InputError(
498 _(
501 _(
499 b'the tracked command is only supported on '
502 b'the tracked command is only supported on '
500 b'repositories cloned with --narrow'
503 b'repositories cloned with --narrow'
501 )
504 )
502 )
505 )
503
506
504 # Before supporting, decide whether it "hg tracked --clear" should mean
507 # Before supporting, decide whether it "hg tracked --clear" should mean
505 # tracking no paths or all paths.
508 # tracking no paths or all paths.
506 if opts[b'clear']:
509 if opts[b'clear']:
507 raise error.InputError(_(b'the --clear option is not yet supported'))
510 raise error.InputError(_(b'the --clear option is not yet supported'))
508
511
509 # import rules from a file
512 # import rules from a file
510 newrules = opts.get(b'import_rules')
513 newrules = opts.get(b'import_rules')
511 if newrules:
514 if newrules:
512 try:
515 try:
513 filepath = os.path.join(encoding.getcwd(), newrules)
516 filepath = os.path.join(encoding.getcwd(), newrules)
514 fdata = util.readfile(filepath)
517 fdata = util.readfile(filepath)
515 except IOError as inst:
518 except IOError as inst:
516 raise error.StorageError(
519 raise error.StorageError(
517 _(b"cannot read narrowspecs from '%s': %s")
520 _(b"cannot read narrowspecs from '%s': %s")
518 % (filepath, encoding.strtolocal(inst.strerror))
521 % (filepath, encoding.strtolocal(inst.strerror))
519 )
522 )
520 includepats, excludepats, profiles = sparse.parseconfig(
523 includepats, excludepats, profiles = sparse.parseconfig(
521 ui, fdata, b'narrow'
524 ui, fdata, b'narrow'
522 )
525 )
523 if profiles:
526 if profiles:
524 raise error.InputError(
527 raise error.InputError(
525 _(
528 _(
526 b"including other spec files using '%include' "
529 b"including other spec files using '%include' "
527 b"is not supported in narrowspec"
530 b"is not supported in narrowspec"
528 )
531 )
529 )
532 )
530 opts[b'addinclude'].extend(includepats)
533 opts[b'addinclude'].extend(includepats)
531 opts[b'addexclude'].extend(excludepats)
534 opts[b'addexclude'].extend(excludepats)
532
535
533 addedincludes = narrowspec.parsepatterns(opts[b'addinclude'])
536 addedincludes = narrowspec.parsepatterns(opts[b'addinclude'])
534 removedincludes = narrowspec.parsepatterns(opts[b'removeinclude'])
537 removedincludes = narrowspec.parsepatterns(opts[b'removeinclude'])
535 addedexcludes = narrowspec.parsepatterns(opts[b'addexclude'])
538 addedexcludes = narrowspec.parsepatterns(opts[b'addexclude'])
536 removedexcludes = narrowspec.parsepatterns(opts[b'removeexclude'])
539 removedexcludes = narrowspec.parsepatterns(opts[b'removeexclude'])
537 autoremoveincludes = opts[b'auto_remove_includes']
540 autoremoveincludes = opts[b'auto_remove_includes']
538
541
539 update_working_copy = opts[b'update_working_copy']
542 update_working_copy = opts[b'update_working_copy']
540 only_show = not (
543 only_show = not (
541 addedincludes
544 addedincludes
542 or removedincludes
545 or removedincludes
543 or addedexcludes
546 or addedexcludes
544 or removedexcludes
547 or removedexcludes
545 or newrules
548 or newrules
546 or autoremoveincludes
549 or autoremoveincludes
547 or update_working_copy
550 or update_working_copy
548 )
551 )
549
552
550 oldincludes, oldexcludes = repo.narrowpats
553 oldincludes, oldexcludes = repo.narrowpats
551
554
552 # filter the user passed additions and deletions into actual additions and
555 # filter the user passed additions and deletions into actual additions and
553 # deletions of excludes and includes
556 # deletions of excludes and includes
554 addedincludes -= oldincludes
557 addedincludes -= oldincludes
555 removedincludes &= oldincludes
558 removedincludes &= oldincludes
556 addedexcludes -= oldexcludes
559 addedexcludes -= oldexcludes
557 removedexcludes &= oldexcludes
560 removedexcludes &= oldexcludes
558
561
559 widening = addedincludes or removedexcludes
562 widening = addedincludes or removedexcludes
560 narrowing = removedincludes or addedexcludes
563 narrowing = removedincludes or addedexcludes
561
564
562 # Only print the current narrowspec.
565 # Only print the current narrowspec.
563 if only_show:
566 if only_show:
564 ui.pager(b'tracked')
567 ui.pager(b'tracked')
565 fm = ui.formatter(b'narrow', opts)
568 fm = ui.formatter(b'narrow', opts)
566 for i in sorted(oldincludes):
569 for i in sorted(oldincludes):
567 fm.startitem()
570 fm.startitem()
568 fm.write(b'status', b'%s ', b'I', label=b'narrow.included')
571 fm.write(b'status', b'%s ', b'I', label=b'narrow.included')
569 fm.write(b'pat', b'%s\n', i, label=b'narrow.included')
572 fm.write(b'pat', b'%s\n', i, label=b'narrow.included')
570 for i in sorted(oldexcludes):
573 for i in sorted(oldexcludes):
571 fm.startitem()
574 fm.startitem()
572 fm.write(b'status', b'%s ', b'X', label=b'narrow.excluded')
575 fm.write(b'status', b'%s ', b'X', label=b'narrow.excluded')
573 fm.write(b'pat', b'%s\n', i, label=b'narrow.excluded')
576 fm.write(b'pat', b'%s\n', i, label=b'narrow.excluded')
574 fm.end()
577 fm.end()
575 return 0
578 return 0
576
579
577 if update_working_copy:
580 if update_working_copy:
578 with repo.wlock(), repo.lock(), repo.transaction(b'narrow-wc'):
581 with repo.wlock(), repo.lock(), repo.transaction(b'narrow-wc'):
579 narrowspec.updateworkingcopy(repo)
582 narrowspec.updateworkingcopy(repo)
580 narrowspec.copytoworkingcopy(repo)
583 narrowspec.copytoworkingcopy(repo)
581 return 0
584 return 0
582
585
583 if not (widening or narrowing or autoremoveincludes):
586 if not (widening or narrowing or autoremoveincludes):
584 ui.status(_(b"nothing to widen or narrow\n"))
587 ui.status(_(b"nothing to widen or narrow\n"))
585 return 0
588 return 0
586
589
587 with repo.wlock(), repo.lock():
590 with repo.wlock(), repo.lock():
588 cmdutil.bailifchanged(repo)
591 cmdutil.bailifchanged(repo)
589
592
590 # Find the revisions we have in common with the remote. These will
593 # Find the revisions we have in common with the remote. These will
591 # be used for finding local-only changes for narrowing. They will
594 # be used for finding local-only changes for narrowing. They will
592 # also define the set of revisions to update for widening.
595 # also define the set of revisions to update for widening.
593 remotepath = ui.expandpath(remotepath or b'default')
596 remotepath = ui.expandpath(remotepath or b'default')
594 url, branches = hg.parseurl(remotepath)
597 url, branches = hg.parseurl(remotepath)
595 ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
598 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url))
596 remote = hg.peer(repo, opts, url)
599 remote = hg.peer(repo, opts, url)
597
600
598 try:
601 try:
599 # check narrow support before doing anything if widening needs to be
602 # check narrow support before doing anything if widening needs to be
600 # performed. In future we should also abort if client is ellipses and
603 # performed. In future we should also abort if client is ellipses and
601 # server does not support ellipses
604 # server does not support ellipses
602 if (
605 if (
603 widening
606 widening
604 and wireprototypes.NARROWCAP not in remote.capabilities()
607 and wireprototypes.NARROWCAP not in remote.capabilities()
605 ):
608 ):
606 raise error.Abort(_(b"server does not support narrow clones"))
609 raise error.Abort(_(b"server does not support narrow clones"))
607
610
608 commoninc = discovery.findcommonincoming(repo, remote)
611 commoninc = discovery.findcommonincoming(repo, remote)
609
612
610 if autoremoveincludes:
613 if autoremoveincludes:
611 outgoing = discovery.findcommonoutgoing(
614 outgoing = discovery.findcommonoutgoing(
612 repo, remote, commoninc=commoninc
615 repo, remote, commoninc=commoninc
613 )
616 )
614 ui.status(_(b'looking for unused includes to remove\n'))
617 ui.status(_(b'looking for unused includes to remove\n'))
615 localfiles = set()
618 localfiles = set()
616 for n in itertools.chain(outgoing.missing, outgoing.excluded):
619 for n in itertools.chain(outgoing.missing, outgoing.excluded):
617 localfiles.update(repo[n].files())
620 localfiles.update(repo[n].files())
618 suggestedremovals = []
621 suggestedremovals = []
619 for include in sorted(oldincludes):
622 for include in sorted(oldincludes):
620 match = narrowspec.match(repo.root, [include], oldexcludes)
623 match = narrowspec.match(repo.root, [include], oldexcludes)
621 if not any(match(f) for f in localfiles):
624 if not any(match(f) for f in localfiles):
622 suggestedremovals.append(include)
625 suggestedremovals.append(include)
623 if suggestedremovals:
626 if suggestedremovals:
624 for s in suggestedremovals:
627 for s in suggestedremovals:
625 ui.status(b'%s\n' % s)
628 ui.status(b'%s\n' % s)
626 if (
629 if (
627 ui.promptchoice(
630 ui.promptchoice(
628 _(
631 _(
629 b'remove these unused includes (yn)?'
632 b'remove these unused includes (yn)?'
630 b'$$ &Yes $$ &No'
633 b'$$ &Yes $$ &No'
631 )
634 )
632 )
635 )
633 == 0
636 == 0
634 ):
637 ):
635 removedincludes.update(suggestedremovals)
638 removedincludes.update(suggestedremovals)
636 narrowing = True
639 narrowing = True
637 else:
640 else:
638 ui.status(_(b'found no unused includes\n'))
641 ui.status(_(b'found no unused includes\n'))
639
642
640 if narrowing:
643 if narrowing:
641 newincludes = oldincludes - removedincludes
644 newincludes = oldincludes - removedincludes
642 newexcludes = oldexcludes | addedexcludes
645 newexcludes = oldexcludes | addedexcludes
643 _narrow(
646 _narrow(
644 ui,
647 ui,
645 repo,
648 repo,
646 remote,
649 remote,
647 commoninc,
650 commoninc,
648 oldincludes,
651 oldincludes,
649 oldexcludes,
652 oldexcludes,
650 newincludes,
653 newincludes,
651 newexcludes,
654 newexcludes,
652 opts[b'force_delete_local_changes'],
655 opts[b'force_delete_local_changes'],
653 opts[b'backup'],
656 opts[b'backup'],
654 )
657 )
655 # _narrow() updated the narrowspec and _widen() below needs to
658 # _narrow() updated the narrowspec and _widen() below needs to
656 # use the updated values as its base (otherwise removed includes
659 # use the updated values as its base (otherwise removed includes
657 # and addedexcludes will be lost in the resulting narrowspec)
660 # and addedexcludes will be lost in the resulting narrowspec)
658 oldincludes = newincludes
661 oldincludes = newincludes
659 oldexcludes = newexcludes
662 oldexcludes = newexcludes
660
663
661 if widening:
664 if widening:
662 newincludes = oldincludes | addedincludes
665 newincludes = oldincludes | addedincludes
663 newexcludes = oldexcludes - removedexcludes
666 newexcludes = oldexcludes - removedexcludes
664 _widen(
667 _widen(
665 ui,
668 ui,
666 repo,
669 repo,
667 remote,
670 remote,
668 commoninc,
671 commoninc,
669 oldincludes,
672 oldincludes,
670 oldexcludes,
673 oldexcludes,
671 newincludes,
674 newincludes,
672 newexcludes,
675 newexcludes,
673 )
676 )
674 finally:
677 finally:
675 remote.close()
678 remote.close()
676
679
677 return 0
680 return 0
@@ -1,998 +1,1001 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 you do not supply one via configuration or the command line. You can
48 you do not supply one via configuration or the command line. You can
49 override this to never prompt by configuring an empty value::
49 override this to never prompt by configuring an empty value::
50
50
51 [email]
51 [email]
52 cc =
52 cc =
53
53
54 You can control the default inclusion of an introduction message with the
54 You can control the default inclusion of an introduction message with the
55 ``patchbomb.intro`` configuration option. The configuration is always
55 ``patchbomb.intro`` configuration option. The configuration is always
56 overwritten by command line flags like --intro and --desc::
56 overwritten by command line flags like --intro and --desc::
57
57
58 [patchbomb]
58 [patchbomb]
59 intro=auto # include introduction message if more than 1 patch (default)
59 intro=auto # include introduction message if more than 1 patch (default)
60 intro=never # never include an introduction message
60 intro=never # never include an introduction message
61 intro=always # always include an introduction message
61 intro=always # always include an introduction message
62
62
63 You can specify a template for flags to be added in subject prefixes. Flags
63 You can specify a template for flags to be added in subject prefixes. Flags
64 specified by --flag option are exported as ``{flags}`` keyword::
64 specified by --flag option are exported as ``{flags}`` keyword::
65
65
66 [patchbomb]
66 [patchbomb]
67 flagtemplate = "{separate(' ',
67 flagtemplate = "{separate(' ',
68 ifeq(branch, 'default', '', branch|upper),
68 ifeq(branch, 'default', '', branch|upper),
69 flags)}"
69 flags)}"
70
70
71 You can set patchbomb to always ask for confirmation by setting
71 You can set patchbomb to always ask for confirmation by setting
72 ``patchbomb.confirm`` to true.
72 ``patchbomb.confirm`` to true.
73 '''
73 '''
74 from __future__ import absolute_import
74 from __future__ import absolute_import
75
75
76 import email.encoders as emailencoders
76 import email.encoders as emailencoders
77 import email.mime.base as emimebase
77 import email.mime.base as emimebase
78 import email.mime.multipart as emimemultipart
78 import email.mime.multipart as emimemultipart
79 import email.utils as eutil
79 import email.utils as eutil
80 import errno
80 import errno
81 import os
81 import os
82 import socket
82 import socket
83
83
84 from mercurial.i18n import _
84 from mercurial.i18n import _
85 from mercurial.pycompat import open
85 from mercurial.pycompat import open
86 from mercurial.node import bin
86 from mercurial.node import bin
87 from mercurial import (
87 from mercurial import (
88 cmdutil,
88 cmdutil,
89 commands,
89 commands,
90 encoding,
90 encoding,
91 error,
91 error,
92 formatter,
92 formatter,
93 hg,
93 hg,
94 mail,
94 mail,
95 patch,
95 patch,
96 pycompat,
96 pycompat,
97 registrar,
97 registrar,
98 scmutil,
98 scmutil,
99 templater,
99 templater,
100 util,
100 util,
101 )
101 )
102 from mercurial.utils import dateutil
102 from mercurial.utils import (
103 dateutil,
104 urlutil,
105 )
103
106
stringio = util.stringio

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# patchbomb.* configuration knobs and their defaults.
configitem(
    b'patchbomb',
    b'bundletype',
    default=None,
)
configitem(
    b'patchbomb',
    b'bcc',
    default=None,
)
configitem(
    b'patchbomb',
    b'cc',
    default=None,
)
configitem(
    b'patchbomb',
    b'confirm',
    default=False,
)
configitem(
    b'patchbomb',
    b'flagtemplate',
    default=None,
)
configitem(
    b'patchbomb',
    b'from',
    default=None,
)
configitem(
    b'patchbomb',
    b'intro',
    default=b'auto',
)
configitem(
    b'patchbomb',
    b'publicurl',
    default=None,
)
configitem(
    b'patchbomb',
    b'reply-to',
    default=None,
)
configitem(
    b'patchbomb',
    b'to',
    default=None,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'
168
171
169
172
170 def _addpullheader(seq, ctx):
173 def _addpullheader(seq, ctx):
171 """Add a header pointing to a public URL where the changeset is available"""
174 """Add a header pointing to a public URL where the changeset is available"""
172 repo = ctx.repo()
175 repo = ctx.repo()
173 # experimental config: patchbomb.publicurl
176 # experimental config: patchbomb.publicurl
174 # waiting for some logic that check that the changeset are available on the
177 # waiting for some logic that check that the changeset are available on the
175 # destination before patchbombing anything.
178 # destination before patchbombing anything.
176 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
179 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
177 if publicurl:
180 if publicurl:
178 return b'Available At %s\n# hg pull %s -r %s' % (
181 return b'Available At %s\n# hg pull %s -r %s' % (
179 publicurl,
182 publicurl,
180 publicurl,
183 publicurl,
181 ctx,
184 ctx,
182 )
185 )
183 return None
186 return None
184
187
185
188
def uisetup(ui):
    # Register an extra "pullurl" header for `hg export`, rendered by
    # _addpullheader (only emitted when patchbomb.publicurl is set).
    cmdutil.extraexport.append(b'pullurl')
    cmdutil.extraexportmap[b'pullurl'] = _addpullheader
189
192
190
193
def reposetup(ui, repo):
    # Only local repositories have a vfs to store last-email.txt in.
    if not repo.local():
        return
    # last-email.txt is a scratch backup of the intro message (see
    # _getdescription); writing it must not require the wlock.
    repo._wlockfreeprefix.add(b'last-email.txt')
195
198
196
199
def prompt(ui, prompt, default=None, rest=b':'):
    """Ask the user a question, showing *default* (if any) in brackets."""
    text = prompt
    if default:
        text += b' [%s]' % default
    return ui.prompt(text + rest, default)
201
204
202
205
def introwanted(ui, opts, number):
    """Decide whether an introductory message should be sent.

    Command-line flags win; otherwise ``patchbomb.intro`` decides
    (always / never / auto = only for multi-patch series).
    """
    introconfig = ui.config(b'patchbomb', b'intro')
    if opts.get(b'intro') or opts.get(b'desc'):
        return True
    if introconfig == b'always':
        return True
    if introconfig == b'never':
        return False
    if introconfig == b'auto':
        return number > 1
    # Unknown config value: warn and fall back to the 'auto' behavior.
    ui.write_err(
        _(b'warning: invalid patchbomb.intro value "%s"\n') % introconfig
    )
    ui.write_err(_(b'(should be one of always, never, auto)\n'))
    return number > 1
221
224
222
225
223 def _formatflags(ui, repo, rev, flags):
226 def _formatflags(ui, repo, rev, flags):
224 """build flag string optionally by template"""
227 """build flag string optionally by template"""
225 tmpl = ui.config(b'patchbomb', b'flagtemplate')
228 tmpl = ui.config(b'patchbomb', b'flagtemplate')
226 if not tmpl:
229 if not tmpl:
227 return b' '.join(flags)
230 return b' '.join(flags)
228 out = util.stringio()
231 out = util.stringio()
229 spec = formatter.literal_templatespec(templater.unquotestring(tmpl))
232 spec = formatter.literal_templatespec(templater.unquotestring(tmpl))
230 with formatter.templateformatter(ui, out, b'patchbombflag', {}, spec) as fm:
233 with formatter.templateformatter(ui, out, b'patchbombflag', {}, spec) as fm:
231 fm.startitem()
234 fm.startitem()
232 fm.context(ctx=repo[rev])
235 fm.context(ctx=repo[rev])
233 fm.write(b'flags', b'%s', fm.formatlist(flags, name=b'flag'))
236 fm.write(b'flags', b'%s', fm.formatlist(flags, name=b'flag'))
234 return out.getvalue()
237 return out.getvalue()
235
238
236
239
def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
    """Build the '[PATCH ...]' prefix for a patch subject line."""
    flag = _formatflags(ui, repo, rev, flags)
    if flag:
        flag = b' ' + flag

    if numbered:
        # Zero-pad the index to the width of the series total.
        width = len(b"%d" % total)
        return b'[PATCH %0*d of %d%s]' % (width, idx, total, flag)
    return b'[PATCH%s]' % flag
248
251
249
252
def makepatch(
    ui,
    repo,
    rev,
    patchlines,
    opts,
    _charsets,
    idx,
    total,
    numbered,
    patchname=None,
):
    """Build one email for a single patch.

    ``patchlines`` is the output of `hg export` split into lines; ``idx`` and
    ``total`` drive the "[PATCH i of N]" numbering when ``numbered`` is true.
    Returns a ``(message, subject, diffstat)`` tuple.
    """

    desc = []
    node = None
    body = b''

    # Separate the export header from the diff: collect the description lines
    # and remember the node id; stop at the first diff line.
    for line in patchlines:
        if line.startswith(b'#'):
            if line.startswith(b'# Node ID'):
                node = line.split()[-1]
            continue
        if line.startswith(b'diff -r') or line.startswith(b'diff --git'):
            break
        desc.append(line)

    # Without a node id the patch cannot be named/numbered unless a name
    # was given by the caller.
    if not patchname and not node:
        raise ValueError

    if opts.get(b'attach') and not opts.get(b'body'):
        body = (
            b'\n'.join(desc[1:]).strip()
            or b'Patch subject is complete summary.'
        )
        body += b'\n\n\n'

    if opts.get(b'plain'):
        # Drop the '# ...' export header lines, the description line that
        # follows them, and any blank separator lines.
        while patchlines and patchlines[0].startswith(b'# '):
            patchlines.pop(0)
        if patchlines:
            patchlines.pop(0)
        while patchlines and not patchlines[0].strip():
            patchlines.pop(0)

    ds = patch.diffstat(patchlines)
    if opts.get(b'diffstat'):
        body += ds + b'\n\n'

    addattachment = opts.get(b'attach') or opts.get(b'inline')
    if not addattachment or opts.get(b'body'):
        body += b'\n'.join(patchlines)

    if addattachment:
        msg = emimemultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(b'test')))
        p = mail.mimetextpatch(
            b'\n'.join(patchlines), 'x-patch', opts.get(b'test')
        )
        binnode = bin(node)
        # if node is mq patch, it will have the patch file's name as a tag
        if not patchname:
            patchtags = [
                t
                for t in repo.nodetags(binnode)
                if t.endswith(b'.patch') or t.endswith(b'.diff')
            ]
            if patchtags:
                patchname = patchtags[0]
            elif total > 1:
                patchname = cmdutil.makefilename(
                    repo[node], b'%b-%n.patch', seqno=idx, total=total
                )
            else:
                patchname = cmdutil.makefilename(repo[node], b'%b.patch')
        disposition = r'inline'
        if opts.get(b'attach'):
            disposition = r'attachment'
        p['Content-Disposition'] = (
            disposition + '; filename=' + encoding.strfromlocal(patchname)
        )
        msg.attach(p)
    else:
        msg = mail.mimetextpatch(body, display=opts.get(b'test'))

    prefix = _formatprefix(
        ui, repo, rev, opts.get(b'flag'), idx, total, numbered
    )
    subj = desc[0].strip().rstrip(b'. ')
    if not numbered:
        subj = b' '.join([prefix, opts.get(b'subject') or subj])
    else:
        subj = b' '.join([prefix, subj])
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(b'test'))
    # Threading/series metadata consumed by tools importing the series.
    msg['X-Mercurial-Node'] = pycompat.sysstr(node)
    msg['X-Mercurial-Series-Index'] = '%i' % idx
    msg['X-Mercurial-Series-Total'] = '%i' % total
    return msg, subj, ds
348
351
349
352
def _getpatches(repo, revs, **opts):
    """Yield each revision in *revs* as an exported patch (list of lines)."""
    ui = repo.ui
    wdirparent = repo[b'.'].rev()
    for rev in revs:
        # Warn when exporting the working-directory parent while there are
        # uncommitted changes (they will not be part of the patch).
        if rev == wdirparent and (repo[None].files() or repo[None].deleted()):
            ui.warn(_(b'warning: working directory has uncommitted changes\n'))
        buf = stringio()
        diffopts = patch.difffeatureopts(ui, opts, git=True)
        cmdutil.exportfile(repo, [rev], buf, opts=diffopts)
        yield buf.getvalue().split(b'\n')
365
368
366
369
def _getbundle(repo, dest, **opts):
    """return a bundle containing changesets missing in "dest"

    The `opts` keyword-arguments are the same as the one accepted by the
    `bundle` command.

    The bundle is a returned as a single in-memory binary blob.
    """
    ui = repo.ui
    # Write the bundle into a private temporary directory, then read it
    # back into memory.
    tmpdir = pycompat.mkdtemp(prefix=b'hg-email-bundle-')
    tmpfn = os.path.join(tmpdir, b'bundle')
    btype = ui.config(b'patchbomb', b'bundletype')
    if btype:
        opts['type'] = btype
    try:
        commands.bundle(ui, repo, tmpfn, dest, **opts)
        return util.readfile(tmpfn)
    finally:
        # Best-effort cleanup: the bundle file may not exist if the bundle
        # command failed before writing it.
        try:
            os.unlink(tmpfn)
        except OSError:
            pass
        os.rmdir(tmpdir)
390
393
391
394
392 def _getdescription(repo, defaultbody, sender, **opts):
395 def _getdescription(repo, defaultbody, sender, **opts):
393 """obtain the body of the introduction message and return it
396 """obtain the body of the introduction message and return it
394
397
395 This is also used for the body of email with an attached bundle.
398 This is also used for the body of email with an attached bundle.
396
399
397 The body can be obtained either from the command line option or entered by
400 The body can be obtained either from the command line option or entered by
398 the user through the editor.
401 the user through the editor.
399 """
402 """
400 ui = repo.ui
403 ui = repo.ui
401 if opts.get('desc'):
404 if opts.get('desc'):
402 body = open(opts.get('desc')).read()
405 body = open(opts.get('desc')).read()
403 else:
406 else:
404 ui.write(
407 ui.write(
405 _(b'\nWrite the introductory message for the patch series.\n\n')
408 _(b'\nWrite the introductory message for the patch series.\n\n')
406 )
409 )
407 body = ui.edit(
410 body = ui.edit(
408 defaultbody, sender, repopath=repo.path, action=b'patchbombbody'
411 defaultbody, sender, repopath=repo.path, action=b'patchbombbody'
409 )
412 )
410 # Save series description in case sendmail fails
413 # Save series description in case sendmail fails
411 msgfile = repo.vfs(b'last-email.txt', b'wb')
414 msgfile = repo.vfs(b'last-email.txt', b'wb')
412 msgfile.write(body)
415 msgfile.write(body)
413 msgfile.close()
416 msgfile.close()
414 return body
417 return body
415
418
416
419
def _getbundlemsgs(repo, sender, bundle, **opts):
    """Get the full email for sending a given bundle

    This function returns a list of "email" tuples (subject, content, None).
    The list is always one message long in that case.
    """
    ui = repo.ui
    _charsets = mail._charsets(ui)
    subj = opts.get('subject') or prompt(
        ui, b'Subject:', b'A bundle for your repository'
    )

    body = _getdescription(repo, b'', sender, **opts)
    msg = emimemultipart.MIMEMultipart()
    if body:
        msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
    # Attach the bundle itself as a base64-encoded binary MIME part.
    datapart = emimebase.MIMEBase('application', 'x-mercurial-bundle')
    datapart.set_payload(bundle)
    bundlename = b'%s.hg' % opts.get('bundlename', b'bundle')
    datapart.add_header(
        'Content-Disposition',
        'attachment',
        filename=encoding.strfromlocal(bundlename),
    )
    emailencoders.encode_base64(datapart)
    msg.attach(datapart)
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    return [(msg, subj, None)]
445
448
446
449
def _makeintro(repo, sender, revs, patches, **opts):
    """make an introduction email, asking the user for content if needed

    email is returned as (subject, body, cumulative-diffstat)"""
    ui = repo.ui
    _charsets = mail._charsets(ui)

    # use the last revision which is likely to be a bookmarked head
    prefix = _formatprefix(
        ui, repo, revs.last(), opts.get('flag'), 0, len(patches), numbered=True
    )
    subj = opts.get('subject') or prompt(
        ui, b'(optional) Subject: ', rest=prefix, default=b''
    )
    if not subj:
        return None  # skip intro if the user doesn't bother

    subj = prefix + b' ' + subj

    body = b''
    if opts.get('diffstat'):
        # generate a cumulative diffstat of the whole patch series
        diffstat = patch.diffstat(sum(patches, []))
        body = b'\n' + diffstat
    else:
        diffstat = None

    body = _getdescription(repo, body, sender, **opts)
    msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    return (msg, subj, diffstat)
478
481
479
482
def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
    """return a list of emails from a list of patches

    This involves introduction message creation if necessary.

    This function returns a list of "email" tuples (subject, content, None).
    """
    bytesopts = pycompat.byteskwargs(opts)
    ui = repo.ui
    _charsets = mail._charsets(ui)
    patches = list(_getpatches(repo, revs, **opts))
    msgs = []

    ui.write(_(b'this patch series consists of %d patches.\n\n') % len(patches))

    # build the intro message, or skip it if the user declines
    if introwanted(ui, bytesopts, len(patches)):
        msg = _makeintro(repo, sender, revs, patches, **opts)
        if msg:
            msgs.append(msg)

    # are we going to send more than one message?
    numbered = len(msgs) + len(patches) > 1

    # now generate the actual patch messages
    name = None
    assert len(revs) == len(patches)
    for i, (r, p) in enumerate(zip(revs, patches)):
        if patchnames:
            name = patchnames[i]
        msg = makepatch(
            ui,
            repo,
            r,
            p,
            bytesopts,
            _charsets,
            i + 1,
            len(patches),
            numbered,
            name,
        )
        msgs.append(msg)

    return msgs
525
528
526
529
def _getoutgoing(repo, dest, revs):
    '''Return the revisions present locally but not in dest'''
    ui = repo.ui
    url = ui.expandpath(dest or b'default-push', dest or b'default')
    url = hg.parseurl(url)[0]
    # Hide any password embedded in the URL before showing it to the user.
    ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url))

    # Negative revs (wdir/null placeholders) cannot be outgoing candidates.
    revs = [r for r in revs if r >= 0]
    if not revs:
        revs = [repo.changelog.tiprev()]
    revs = repo.revs(b'outgoing(%s) and ::%ld', dest or b'', revs)
    if not revs:
        ui.status(_(b"no changes found\n"))
    return revs
541
544
542
545
def _msgid(node, timestamp):
    """Build a Message-Id of the form ``<node.timestamp@hostname>``.

    ``HGHOSTNAME`` overrides the local FQDN (useful for stable test output).
    """
    override = encoding.environ.get(b'HGHOSTNAME')
    if override is not None:
        hostname = encoding.strfromlocal(override)
    else:
        hostname = socket.getfqdn()
    return '<%s.%d@%s>' % (node, timestamp, hostname)
549
552
550
553
# Email-related command-line options shared by `hg email`.  Each entry is a
# standard Mercurial option tuple: (short flag, long flag, default, help
# text[, value placeholder]).
emailopts = [
    (b'', b'body', None, _(b'send patches as inline message text (default)')),
    (b'a', b'attach', None, _(b'send patches as attachments')),
    (b'i', b'inline', None, _(b'send patches as inline attachments')),
    (
        b'',
        b'bcc',
        [],
        _(b'email addresses of blind carbon copy recipients'),
        _(b'EMAIL'),
    ),
    (b'c', b'cc', [], _(b'email addresses of copy recipients'), _(b'EMAIL')),
    (b'', b'confirm', None, _(b'ask for confirmation before sending')),
    (b'd', b'diffstat', None, _(b'add diffstat output to messages')),
    (
        b'',
        b'date',
        b'',
        _(b'use the given date as the sending date'),
        _(b'DATE'),
    ),
    (
        b'',
        b'desc',
        b'',
        _(b'use the given file as the series description'),
        _(b'FILE'),
    ),
    (b'f', b'from', b'', _(b'email address of sender'), _(b'EMAIL')),
    (b'n', b'test', None, _(b'print messages that would be sent')),
    (
        b'm',
        b'mbox',
        b'',
        _(b'write messages to mbox file instead of sending them'),
        _(b'FILE'),
    ),
    (
        b'',
        b'reply-to',
        [],
        _(b'email addresses replies should be sent to'),
        _(b'EMAIL'),
    ),
    (
        b's',
        b'subject',
        b'',
        _(b'subject of first message (intro or single patch)'),
        _(b'TEXT'),
    ),
    (
        b'',
        b'in-reply-to',
        b'',
        _(b'message identifier to reply to'),
        _(b'MSGID'),
    ),
    (b'', b'flag', [], _(b'flags to add in subject prefixes'), _(b'FLAG')),
    (b't', b'to', [], _(b'email addresses of recipients'), _(b'EMAIL')),
]
612
615
613
616
614 @command(
617 @command(
615 b'email',
618 b'email',
616 [
619 [
617 (b'g', b'git', None, _(b'use git extended diff format')),
620 (b'g', b'git', None, _(b'use git extended diff format')),
618 (b'', b'plain', None, _(b'omit hg patch header')),
621 (b'', b'plain', None, _(b'omit hg patch header')),
619 (
622 (
620 b'o',
623 b'o',
621 b'outgoing',
624 b'outgoing',
622 None,
625 None,
623 _(b'send changes not found in the target repository'),
626 _(b'send changes not found in the target repository'),
624 ),
627 ),
625 (
628 (
626 b'b',
629 b'b',
627 b'bundle',
630 b'bundle',
628 None,
631 None,
629 _(b'send changes not in target as a binary bundle'),
632 _(b'send changes not in target as a binary bundle'),
630 ),
633 ),
631 (
634 (
632 b'B',
635 b'B',
633 b'bookmark',
636 b'bookmark',
634 b'',
637 b'',
635 _(b'send changes only reachable by given bookmark'),
638 _(b'send changes only reachable by given bookmark'),
636 _(b'BOOKMARK'),
639 _(b'BOOKMARK'),
637 ),
640 ),
638 (
641 (
639 b'',
642 b'',
640 b'bundlename',
643 b'bundlename',
641 b'bundle',
644 b'bundle',
642 _(b'name of the bundle attachment file'),
645 _(b'name of the bundle attachment file'),
643 _(b'NAME'),
646 _(b'NAME'),
644 ),
647 ),
645 (b'r', b'rev', [], _(b'a revision to send'), _(b'REV')),
648 (b'r', b'rev', [], _(b'a revision to send'), _(b'REV')),
646 (
649 (
647 b'',
650 b'',
648 b'force',
651 b'force',
649 None,
652 None,
650 _(
653 _(
651 b'run even when remote repository is unrelated '
654 b'run even when remote repository is unrelated '
652 b'(with -b/--bundle)'
655 b'(with -b/--bundle)'
653 ),
656 ),
654 ),
657 ),
655 (
658 (
656 b'',
659 b'',
657 b'base',
660 b'base',
658 [],
661 [],
659 _(
662 _(
660 b'a base changeset to specify instead of a destination '
663 b'a base changeset to specify instead of a destination '
661 b'(with -b/--bundle)'
664 b'(with -b/--bundle)'
662 ),
665 ),
663 _(b'REV'),
666 _(b'REV'),
664 ),
667 ),
665 (
668 (
666 b'',
669 b'',
667 b'intro',
670 b'intro',
668 None,
671 None,
669 _(b'send an introduction email for a single patch'),
672 _(b'send an introduction email for a single patch'),
670 ),
673 ),
671 ]
674 ]
672 + emailopts
675 + emailopts
673 + cmdutil.remoteopts,
676 + cmdutil.remoteopts,
674 _(b'hg email [OPTION]... [DEST]...'),
677 _(b'hg email [OPTION]... [DEST]...'),
675 helpcategory=command.CATEGORY_IMPORT_EXPORT,
678 helpcategory=command.CATEGORY_IMPORT_EXPORT,
676 )
679 )
677 def email(ui, repo, *revs, **opts):
680 def email(ui, repo, *revs, **opts):
678 """send changesets by email
681 """send changesets by email
679
682
680 By default, diffs are sent in the format generated by
683 By default, diffs are sent in the format generated by
681 :hg:`export`, one per message. The series starts with a "[PATCH 0
684 :hg:`export`, one per message. The series starts with a "[PATCH 0
682 of N]" introduction, which describes the series as a whole.
685 of N]" introduction, which describes the series as a whole.
683
686
684 Each patch email has a Subject line of "[PATCH M of N] ...", using
687 Each patch email has a Subject line of "[PATCH M of N] ...", using
685 the first line of the changeset description as the subject text.
688 the first line of the changeset description as the subject text.
686 The message contains two or three parts. First, the changeset
689 The message contains two or three parts. First, the changeset
687 description.
690 description.
688
691
689 With the -d/--diffstat option, if the diffstat program is
692 With the -d/--diffstat option, if the diffstat program is
690 installed, the result of running diffstat on the patch is inserted.
693 installed, the result of running diffstat on the patch is inserted.
691
694
692 Finally, the patch itself, as generated by :hg:`export`.
695 Finally, the patch itself, as generated by :hg:`export`.
693
696
694 With the -d/--diffstat or --confirm options, you will be presented
697 With the -d/--diffstat or --confirm options, you will be presented
695 with a final summary of all messages and asked for confirmation before
698 with a final summary of all messages and asked for confirmation before
696 the messages are sent.
699 the messages are sent.
697
700
698 By default the patch is included as text in the email body for
701 By default the patch is included as text in the email body for
699 easy reviewing. Using the -a/--attach option will instead create
702 easy reviewing. Using the -a/--attach option will instead create
700 an attachment for the patch. With -i/--inline an inline attachment
703 an attachment for the patch. With -i/--inline an inline attachment
701 will be created. You can include a patch both as text in the email
704 will be created. You can include a patch both as text in the email
702 body and as a regular or an inline attachment by combining the
705 body and as a regular or an inline attachment by combining the
703 -a/--attach or -i/--inline with the --body option.
706 -a/--attach or -i/--inline with the --body option.
704
707
705 With -B/--bookmark changesets reachable by the given bookmark are
708 With -B/--bookmark changesets reachable by the given bookmark are
706 selected.
709 selected.
707
710
708 With -o/--outgoing, emails will be generated for patches not found
711 With -o/--outgoing, emails will be generated for patches not found
709 in the destination repository (or only those which are ancestors
712 in the destination repository (or only those which are ancestors
710 of the specified revisions if any are provided)
713 of the specified revisions if any are provided)
711
714
712 With -b/--bundle, changesets are selected as for --outgoing, but a
715 With -b/--bundle, changesets are selected as for --outgoing, but a
713 single email containing a binary Mercurial bundle as an attachment
716 single email containing a binary Mercurial bundle as an attachment
714 will be sent. Use the ``patchbomb.bundletype`` config option to
717 will be sent. Use the ``patchbomb.bundletype`` config option to
715 control the bundle type as with :hg:`bundle --type`.
718 control the bundle type as with :hg:`bundle --type`.
716
719
717 With -m/--mbox, instead of previewing each patchbomb message in a
720 With -m/--mbox, instead of previewing each patchbomb message in a
718 pager or sending the messages directly, it will create a UNIX
721 pager or sending the messages directly, it will create a UNIX
719 mailbox file with the patch emails. This mailbox file can be
722 mailbox file with the patch emails. This mailbox file can be
720 previewed with any mail user agent which supports UNIX mbox
723 previewed with any mail user agent which supports UNIX mbox
721 files.
724 files.
722
725
723 With -n/--test, all steps will run, but mail will not be sent.
726 With -n/--test, all steps will run, but mail will not be sent.
724 You will be prompted for an email recipient address, a subject and
727 You will be prompted for an email recipient address, a subject and
725 an introductory message describing the patches of your patchbomb.
728 an introductory message describing the patches of your patchbomb.
726 Then when all is done, patchbomb messages are displayed.
729 Then when all is done, patchbomb messages are displayed.
727
730
728 In case email sending fails, you will find a backup of your series
731 In case email sending fails, you will find a backup of your series
729 introductory message in ``.hg/last-email.txt``.
732 introductory message in ``.hg/last-email.txt``.
730
733
731 The default behavior of this command can be customized through
734 The default behavior of this command can be customized through
732 configuration. (See :hg:`help patchbomb` for details)
735 configuration. (See :hg:`help patchbomb` for details)
733
736
734 Examples::
737 Examples::
735
738
736 hg email -r 3000 # send patch 3000 only
739 hg email -r 3000 # send patch 3000 only
737 hg email -r 3000 -r 3001 # send patches 3000 and 3001
740 hg email -r 3000 -r 3001 # send patches 3000 and 3001
738 hg email -r 3000:3005 # send patches 3000 through 3005
741 hg email -r 3000:3005 # send patches 3000 through 3005
739 hg email 3000 # send patch 3000 (deprecated)
742 hg email 3000 # send patch 3000 (deprecated)
740
743
741 hg email -o # send all patches not in default
744 hg email -o # send all patches not in default
742 hg email -o DEST # send all patches not in DEST
745 hg email -o DEST # send all patches not in DEST
743 hg email -o -r 3000 # send all ancestors of 3000 not in default
746 hg email -o -r 3000 # send all ancestors of 3000 not in default
744 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
747 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
745
748
746 hg email -B feature # send all ancestors of feature bookmark
749 hg email -B feature # send all ancestors of feature bookmark
747
750
748 hg email -b # send bundle of all patches not in default
751 hg email -b # send bundle of all patches not in default
749 hg email -b DEST # send bundle of all patches not in DEST
752 hg email -b DEST # send bundle of all patches not in DEST
750 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
753 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
751 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
754 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
752
755
753 hg email -o -m mbox && # generate an mbox file...
756 hg email -o -m mbox && # generate an mbox file...
754 mutt -R -f mbox # ... and view it with mutt
757 mutt -R -f mbox # ... and view it with mutt
755 hg email -o -m mbox && # generate an mbox file ...
758 hg email -o -m mbox && # generate an mbox file ...
756 formail -s sendmail \\ # ... and use formail to send from the mbox
759 formail -s sendmail \\ # ... and use formail to send from the mbox
757 -bm -t < mbox # ... using sendmail
760 -bm -t < mbox # ... using sendmail
758
761
759 Before using this command, you will need to enable email in your
762 Before using this command, you will need to enable email in your
760 hgrc. See the [email] section in hgrc(5) for details.
763 hgrc. See the [email] section in hgrc(5) for details.
761 """
764 """
762 opts = pycompat.byteskwargs(opts)
765 opts = pycompat.byteskwargs(opts)
763
766
764 _charsets = mail._charsets(ui)
767 _charsets = mail._charsets(ui)
765
768
766 bundle = opts.get(b'bundle')
769 bundle = opts.get(b'bundle')
767 date = opts.get(b'date')
770 date = opts.get(b'date')
768 mbox = opts.get(b'mbox')
771 mbox = opts.get(b'mbox')
769 outgoing = opts.get(b'outgoing')
772 outgoing = opts.get(b'outgoing')
770 rev = opts.get(b'rev')
773 rev = opts.get(b'rev')
771 bookmark = opts.get(b'bookmark')
774 bookmark = opts.get(b'bookmark')
772
775
773 if not (opts.get(b'test') or mbox):
776 if not (opts.get(b'test') or mbox):
774 # really sending
777 # really sending
775 mail.validateconfig(ui)
778 mail.validateconfig(ui)
776
779
777 if not (revs or rev or outgoing or bundle or bookmark):
780 if not (revs or rev or outgoing or bundle or bookmark):
778 raise error.Abort(
781 raise error.Abort(
779 _(b'specify at least one changeset with -B, -r or -o')
782 _(b'specify at least one changeset with -B, -r or -o')
780 )
783 )
781
784
782 if outgoing and bundle:
785 if outgoing and bundle:
783 raise error.Abort(
786 raise error.Abort(
784 _(
787 _(
785 b"--outgoing mode always on with --bundle;"
788 b"--outgoing mode always on with --bundle;"
786 b" do not re-specify --outgoing"
789 b" do not re-specify --outgoing"
787 )
790 )
788 )
791 )
789 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
792 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
790
793
791 if outgoing or bundle:
794 if outgoing or bundle:
792 if len(revs) > 1:
795 if len(revs) > 1:
793 raise error.Abort(_(b"too many destinations"))
796 raise error.Abort(_(b"too many destinations"))
794 if revs:
797 if revs:
795 dest = revs[0]
798 dest = revs[0]
796 else:
799 else:
797 dest = None
800 dest = None
798 revs = []
801 revs = []
799
802
800 if rev:
803 if rev:
801 if revs:
804 if revs:
802 raise error.Abort(_(b'use only one form to specify the revision'))
805 raise error.Abort(_(b'use only one form to specify the revision'))
803 revs = rev
806 revs = rev
804 elif bookmark:
807 elif bookmark:
805 if bookmark not in repo._bookmarks:
808 if bookmark not in repo._bookmarks:
806 raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
809 raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
807 revs = scmutil.bookmarkrevs(repo, bookmark)
810 revs = scmutil.bookmarkrevs(repo, bookmark)
808
811
809 revs = scmutil.revrange(repo, revs)
812 revs = scmutil.revrange(repo, revs)
810 if outgoing:
813 if outgoing:
811 revs = _getoutgoing(repo, dest, revs)
814 revs = _getoutgoing(repo, dest, revs)
812 if bundle:
815 if bundle:
813 opts[b'revs'] = [b"%d" % r for r in revs]
816 opts[b'revs'] = [b"%d" % r for r in revs]
814
817
815 # check if revision exist on the public destination
818 # check if revision exist on the public destination
816 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
819 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
817 if publicurl:
820 if publicurl:
818 repo.ui.debug(b'checking that revision exist in the public repo\n')
821 repo.ui.debug(b'checking that revision exist in the public repo\n')
819 try:
822 try:
820 publicpeer = hg.peer(repo, {}, publicurl)
823 publicpeer = hg.peer(repo, {}, publicurl)
821 except error.RepoError:
824 except error.RepoError:
822 repo.ui.write_err(
825 repo.ui.write_err(
823 _(b'unable to access public repo: %s\n') % publicurl
826 _(b'unable to access public repo: %s\n') % publicurl
824 )
827 )
825 raise
828 raise
826 if not publicpeer.capable(b'known'):
829 if not publicpeer.capable(b'known'):
827 repo.ui.debug(b'skipping existence checks: public repo too old\n')
830 repo.ui.debug(b'skipping existence checks: public repo too old\n')
828 else:
831 else:
829 out = [repo[r] for r in revs]
832 out = [repo[r] for r in revs]
830 known = publicpeer.known(h.node() for h in out)
833 known = publicpeer.known(h.node() for h in out)
831 missing = []
834 missing = []
832 for idx, h in enumerate(out):
835 for idx, h in enumerate(out):
833 if not known[idx]:
836 if not known[idx]:
834 missing.append(h)
837 missing.append(h)
835 if missing:
838 if missing:
836 if len(missing) > 1:
839 if len(missing) > 1:
837 msg = _(b'public "%s" is missing %s and %i others')
840 msg = _(b'public "%s" is missing %s and %i others')
838 msg %= (publicurl, missing[0], len(missing) - 1)
841 msg %= (publicurl, missing[0], len(missing) - 1)
839 else:
842 else:
840 msg = _(b'public url %s is missing %s')
843 msg = _(b'public url %s is missing %s')
841 msg %= (publicurl, missing[0])
844 msg %= (publicurl, missing[0])
842 missingrevs = [ctx.rev() for ctx in missing]
845 missingrevs = [ctx.rev() for ctx in missing]
843 revhint = b' '.join(
846 revhint = b' '.join(
844 b'-r %s' % h for h in repo.set(b'heads(%ld)', missingrevs)
847 b'-r %s' % h for h in repo.set(b'heads(%ld)', missingrevs)
845 )
848 )
846 hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
849 hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
847 raise error.Abort(msg, hint=hint)
850 raise error.Abort(msg, hint=hint)
848
851
849 # start
852 # start
850 if date:
853 if date:
851 start_time = dateutil.parsedate(date)
854 start_time = dateutil.parsedate(date)
852 else:
855 else:
853 start_time = dateutil.makedate()
856 start_time = dateutil.makedate()
854
857
855 def genmsgid(id):
858 def genmsgid(id):
856 return _msgid(id[:20], int(start_time[0]))
859 return _msgid(id[:20], int(start_time[0]))
857
860
858 # deprecated config: patchbomb.from
861 # deprecated config: patchbomb.from
859 sender = (
862 sender = (
860 opts.get(b'from')
863 opts.get(b'from')
861 or ui.config(b'email', b'from')
864 or ui.config(b'email', b'from')
862 or ui.config(b'patchbomb', b'from')
865 or ui.config(b'patchbomb', b'from')
863 or prompt(ui, b'From', ui.username())
866 or prompt(ui, b'From', ui.username())
864 )
867 )
865
868
866 if bundle:
869 if bundle:
867 stropts = pycompat.strkwargs(opts)
870 stropts = pycompat.strkwargs(opts)
868 bundledata = _getbundle(repo, dest, **stropts)
871 bundledata = _getbundle(repo, dest, **stropts)
869 bundleopts = stropts.copy()
872 bundleopts = stropts.copy()
870 bundleopts.pop('bundle', None) # already processed
873 bundleopts.pop('bundle', None) # already processed
871 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
874 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
872 else:
875 else:
873 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
876 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
874
877
875 showaddrs = []
878 showaddrs = []
876
879
877 def getaddrs(header, ask=False, default=None):
880 def getaddrs(header, ask=False, default=None):
878 configkey = header.lower()
881 configkey = header.lower()
879 opt = header.replace(b'-', b'_').lower()
882 opt = header.replace(b'-', b'_').lower()
880 addrs = opts.get(opt)
883 addrs = opts.get(opt)
881 if addrs:
884 if addrs:
882 showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
885 showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
883 return mail.addrlistencode(ui, addrs, _charsets, opts.get(b'test'))
886 return mail.addrlistencode(ui, addrs, _charsets, opts.get(b'test'))
884
887
885 # not on the command line: fallback to config and then maybe ask
888 # not on the command line: fallback to config and then maybe ask
886 addr = ui.config(b'email', configkey) or ui.config(
889 addr = ui.config(b'email', configkey) or ui.config(
887 b'patchbomb', configkey
890 b'patchbomb', configkey
888 )
891 )
889 if not addr:
892 if not addr:
890 specified = ui.hasconfig(b'email', configkey) or ui.hasconfig(
893 specified = ui.hasconfig(b'email', configkey) or ui.hasconfig(
891 b'patchbomb', configkey
894 b'patchbomb', configkey
892 )
895 )
893 if not specified and ask:
896 if not specified and ask:
894 addr = prompt(ui, header, default=default)
897 addr = prompt(ui, header, default=default)
895 if addr:
898 if addr:
896 showaddrs.append(b'%s: %s' % (header, addr))
899 showaddrs.append(b'%s: %s' % (header, addr))
897 return mail.addrlistencode(ui, [addr], _charsets, opts.get(b'test'))
900 return mail.addrlistencode(ui, [addr], _charsets, opts.get(b'test'))
898 elif default:
901 elif default:
899 return mail.addrlistencode(
902 return mail.addrlistencode(
900 ui, [default], _charsets, opts.get(b'test')
903 ui, [default], _charsets, opts.get(b'test')
901 )
904 )
902 return []
905 return []
903
906
904 to = getaddrs(b'To', ask=True)
907 to = getaddrs(b'To', ask=True)
905 if not to:
908 if not to:
906 # we can get here in non-interactive mode
909 # we can get here in non-interactive mode
907 raise error.Abort(_(b'no recipient addresses provided'))
910 raise error.Abort(_(b'no recipient addresses provided'))
908 cc = getaddrs(b'Cc', ask=True, default=b'')
911 cc = getaddrs(b'Cc', ask=True, default=b'')
909 bcc = getaddrs(b'Bcc')
912 bcc = getaddrs(b'Bcc')
910 replyto = getaddrs(b'Reply-To')
913 replyto = getaddrs(b'Reply-To')
911
914
912 confirm = ui.configbool(b'patchbomb', b'confirm')
915 confirm = ui.configbool(b'patchbomb', b'confirm')
913 confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))
916 confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))
914
917
915 if confirm:
918 if confirm:
916 ui.write(_(b'\nFinal summary:\n\n'), label=b'patchbomb.finalsummary')
919 ui.write(_(b'\nFinal summary:\n\n'), label=b'patchbomb.finalsummary')
917 ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
920 ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
918 for addr in showaddrs:
921 for addr in showaddrs:
919 ui.write(b'%s\n' % addr, label=b'patchbomb.to')
922 ui.write(b'%s\n' % addr, label=b'patchbomb.to')
920 for m, subj, ds in msgs:
923 for m, subj, ds in msgs:
921 ui.write((b'Subject: %s\n' % subj), label=b'patchbomb.subject')
924 ui.write((b'Subject: %s\n' % subj), label=b'patchbomb.subject')
922 if ds:
925 if ds:
923 ui.write(ds, label=b'patchbomb.diffstats')
926 ui.write(ds, label=b'patchbomb.diffstats')
924 ui.write(b'\n')
927 ui.write(b'\n')
925 if ui.promptchoice(
928 if ui.promptchoice(
926 _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')
929 _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')
927 ):
930 ):
928 raise error.Abort(_(b'patchbomb canceled'))
931 raise error.Abort(_(b'patchbomb canceled'))
929
932
930 ui.write(b'\n')
933 ui.write(b'\n')
931
934
932 parent = opts.get(b'in_reply_to') or None
935 parent = opts.get(b'in_reply_to') or None
933 # angle brackets may be omitted, they're not semantically part of the msg-id
936 # angle brackets may be omitted, they're not semantically part of the msg-id
934 if parent is not None:
937 if parent is not None:
935 parent = encoding.strfromlocal(parent)
938 parent = encoding.strfromlocal(parent)
936 if not parent.startswith('<'):
939 if not parent.startswith('<'):
937 parent = '<' + parent
940 parent = '<' + parent
938 if not parent.endswith('>'):
941 if not parent.endswith('>'):
939 parent += '>'
942 parent += '>'
940
943
941 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
944 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
942 sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
945 sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
943 sendmail = None
946 sendmail = None
944 firstpatch = None
947 firstpatch = None
945 progress = ui.makeprogress(
948 progress = ui.makeprogress(
946 _(b'sending'), unit=_(b'emails'), total=len(msgs)
949 _(b'sending'), unit=_(b'emails'), total=len(msgs)
947 )
950 )
948 for i, (m, subj, ds) in enumerate(msgs):
951 for i, (m, subj, ds) in enumerate(msgs):
949 try:
952 try:
950 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
953 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
951 if not firstpatch:
954 if not firstpatch:
952 firstpatch = m['Message-Id']
955 firstpatch = m['Message-Id']
953 m['X-Mercurial-Series-Id'] = firstpatch
956 m['X-Mercurial-Series-Id'] = firstpatch
954 except TypeError:
957 except TypeError:
955 m['Message-Id'] = genmsgid('patchbomb')
958 m['Message-Id'] = genmsgid('patchbomb')
956 if parent:
959 if parent:
957 m['In-Reply-To'] = parent
960 m['In-Reply-To'] = parent
958 m['References'] = parent
961 m['References'] = parent
959 if not parent or 'X-Mercurial-Node' not in m:
962 if not parent or 'X-Mercurial-Node' not in m:
960 parent = m['Message-Id']
963 parent = m['Message-Id']
961
964
962 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version().decode()
965 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version().decode()
963 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
966 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
964
967
965 start_time = (start_time[0] + 1, start_time[1])
968 start_time = (start_time[0] + 1, start_time[1])
966 m['From'] = sender
969 m['From'] = sender
967 m['To'] = ', '.join(to)
970 m['To'] = ', '.join(to)
968 if cc:
971 if cc:
969 m['Cc'] = ', '.join(cc)
972 m['Cc'] = ', '.join(cc)
970 if bcc:
973 if bcc:
971 m['Bcc'] = ', '.join(bcc)
974 m['Bcc'] = ', '.join(bcc)
972 if replyto:
975 if replyto:
973 m['Reply-To'] = ', '.join(replyto)
976 m['Reply-To'] = ', '.join(replyto)
974 if opts.get(b'test'):
977 if opts.get(b'test'):
975 ui.status(_(b'displaying '), subj, b' ...\n')
978 ui.status(_(b'displaying '), subj, b' ...\n')
976 ui.pager(b'email')
979 ui.pager(b'email')
977 generator = mail.Generator(ui, mangle_from_=False)
980 generator = mail.Generator(ui, mangle_from_=False)
978 try:
981 try:
979 generator.flatten(m, False)
982 generator.flatten(m, False)
980 ui.write(b'\n')
983 ui.write(b'\n')
981 except IOError as inst:
984 except IOError as inst:
982 if inst.errno != errno.EPIPE:
985 if inst.errno != errno.EPIPE:
983 raise
986 raise
984 else:
987 else:
985 if not sendmail:
988 if not sendmail:
986 sendmail = mail.connect(ui, mbox=mbox)
989 sendmail = mail.connect(ui, mbox=mbox)
987 ui.status(_(b'sending '), subj, b' ...\n')
990 ui.status(_(b'sending '), subj, b' ...\n')
988 progress.update(i, item=subj)
991 progress.update(i, item=subj)
989 if not mbox:
992 if not mbox:
990 # Exim does not remove the Bcc field
993 # Exim does not remove the Bcc field
991 del m['Bcc']
994 del m['Bcc']
992 fp = stringio()
995 fp = stringio()
993 generator = mail.Generator(fp, mangle_from_=False)
996 generator = mail.Generator(fp, mangle_from_=False)
994 generator.flatten(m, False)
997 generator.flatten(m, False)
995 alldests = to + bcc + cc
998 alldests = to + bcc + cc
996 sendmail(sender_addr, alldests, fp.getvalue())
999 sendmail(sender_addr, alldests, fp.getvalue())
997
1000
998 progress.complete()
1001 progress.complete()
@@ -1,2400 +1,2401 b''
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
14 A "phabstatus" view for :hg:`show` is also provided; it displays status
15 information of Phabricator differentials associated with unfinished
15 information of Phabricator differentials associated with unfinished
16 changesets.
16 changesets.
17
17
18 By default, Phabricator requires ``Test Plan`` which might prevent some
18 By default, Phabricator requires ``Test Plan`` which might prevent some
19 changeset from being sent. The requirement could be disabled by changing
19 changeset from being sent. The requirement could be disabled by changing
20 ``differential.require-test-plan-field`` config server side.
20 ``differential.require-test-plan-field`` config server side.
21
21
22 Config::
22 Config::
23
23
24 [phabricator]
24 [phabricator]
25 # Phabricator URL
25 # Phabricator URL
26 url = https://phab.example.com/
26 url = https://phab.example.com/
27
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
29 # callsign is "FOO".
30 callsign = FOO
30 callsign = FOO
31
31
32 # curl command to use. If not set (default), use builtin HTTP library to
32 # curl command to use. If not set (default), use builtin HTTP library to
33 # communicate. If set, use the specified curl command. This could be useful
33 # communicate. If set, use the specified curl command. This could be useful
34 # if you need to specify advanced options that is not easily supported by
34 # if you need to specify advanced options that is not easily supported by
35 # the internal library.
35 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
37
38 # retry failed command N time (default 0). Useful when using the extension
38 # retry failed command N time (default 0). Useful when using the extension
39 # over flakly connection.
39 # over flakly connection.
40 #
40 #
41 # We wait `retry.interval` between each retry, in seconds.
41 # We wait `retry.interval` between each retry, in seconds.
42 # (default 1 second).
42 # (default 1 second).
43 retry = 3
43 retry = 3
44 retry.interval = 10
44 retry.interval = 10
45
45
46 # the retry option can combine well with the http.timeout one.
46 # the retry option can combine well with the http.timeout one.
47 #
47 #
48 # For example to give up on http request after 20 seconds:
48 # For example to give up on http request after 20 seconds:
49 [http]
49 [http]
50 timeout=20
50 timeout=20
51
51
52 [auth]
52 [auth]
53 example.schemes = https
53 example.schemes = https
54 example.prefix = phab.example.com
54 example.prefix = phab.example.com
55
55
56 # API token. Get it from https://$HOST/conduit/login/
56 # API token. Get it from https://$HOST/conduit/login/
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
58 """
58 """
59
59
60 from __future__ import absolute_import
60 from __future__ import absolute_import
61
61
62 import base64
62 import base64
63 import contextlib
63 import contextlib
64 import hashlib
64 import hashlib
65 import itertools
65 import itertools
66 import json
66 import json
67 import mimetypes
67 import mimetypes
68 import operator
68 import operator
69 import re
69 import re
70 import time
70 import time
71
71
72 from mercurial.node import bin, nullid, short
72 from mercurial.node import bin, nullid, short
73 from mercurial.i18n import _
73 from mercurial.i18n import _
74 from mercurial.pycompat import getattr
74 from mercurial.pycompat import getattr
75 from mercurial.thirdparty import attr
75 from mercurial.thirdparty import attr
76 from mercurial import (
76 from mercurial import (
77 cmdutil,
77 cmdutil,
78 context,
78 context,
79 copies,
79 copies,
80 encoding,
80 encoding,
81 error,
81 error,
82 exthelper,
82 exthelper,
83 graphmod,
83 graphmod,
84 httpconnection as httpconnectionmod,
84 httpconnection as httpconnectionmod,
85 localrepo,
85 localrepo,
86 logcmdutil,
86 logcmdutil,
87 match,
87 match,
88 mdiff,
88 mdiff,
89 obsutil,
89 obsutil,
90 parser,
90 parser,
91 patch,
91 patch,
92 phases,
92 phases,
93 pycompat,
93 pycompat,
94 rewriteutil,
94 rewriteutil,
95 scmutil,
95 scmutil,
96 smartset,
96 smartset,
97 tags,
97 tags,
98 templatefilters,
98 templatefilters,
99 templateutil,
99 templateutil,
100 url as urlmod,
100 url as urlmod,
101 util,
101 util,
102 )
102 )
103 from mercurial.utils import (
103 from mercurial.utils import (
104 procutil,
104 procutil,
105 stringutil,
105 stringutil,
106 urlutil,
106 )
107 )
107 from . import show
108 from . import show
108
109
109
110
110 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
111 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
111 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
112 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
112 # be specifying the version(s) of Mercurial they are tested with, or
113 # be specifying the version(s) of Mercurial they are tested with, or
113 # leave the attribute unspecified.
114 # leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

eh = exthelper.exthelper()

cmdtable = eh.cmdtable
command = eh.command
configtable = eh.configtable
templatekeyword = eh.templatekeyword
uisetup = eh.finaluisetup

# Config item registrations for the [phabricator] / [phabsend] /
# [phabimport] sections (defaults documented in the module docstring).
# developer config: phabricator.batchsize
eh.configitem(b'phabricator', b'batchsize', default=12)
eh.configitem(b'phabricator', b'callsign', default=None)
eh.configitem(b'phabricator', b'curlcmd', default=None)
# developer config: phabricator.debug
eh.configitem(b'phabricator', b'debug', default=False)
# developer config: phabricator.repophid
eh.configitem(b'phabricator', b'repophid', default=None)
eh.configitem(b'phabricator', b'retry', default=0)
eh.configitem(b'phabricator', b'retry.interval', default=1)
eh.configitem(b'phabricator', b'url', default=None)
eh.configitem(b'phabsend', b'confirm', default=False)
eh.configitem(b'phabimport', b'secret', default=False)
eh.configitem(b'phabimport', b'obsolete', default=False)

# Labels used by the color extension when rendering phabricator output.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.drev': b'bold',
    b'phabricator.status.abandoned': b'magenta dim',
    b'phabricator.status.accepted': b'green bold',
    b'phabricator.status.closed': b'green',
    b'phabricator.status.needsreview': b'yellow',
    b'phabricator.status.needsrevision': b'red',
    b'phabricator.status.changesplanned': b'red',
}

# Extra command flags added by vcrcommand() to support recorded
# HTTP transcripts in tests.
_VCR_FLAGS = [
    (
        b'',
        b'test-vcr',
        b'',
        _(
            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
            b', otherwise will mock all http requests using the specified vcr file.'
            b' (ADVANCED)'
        ),
    ),
]
208
209
209
210
@eh.wrapfunction(localrepo, "loadhgrc")
def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
    """Load ``.arcconfig`` content into a ui instance on repository open.

    Wraps ``localrepo.loadhgrc``; returns True when either the wrapped
    function loaded configuration or ``.arcconfig`` was parsed successfully.
    """
    parsed_ok = False
    arcparams = {}

    def _tolocal(value):
        # json_loads only returns unicode strings; convert back to the
        # local encoding so the values are usable as config values
        if isinstance(value, pycompat.unicode):
            return encoding.unitolocal(value)
        return value

    try:
        # json.loads only accepts bytes from 3.6+
        raw = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
        arcparams = pycompat.rapply(_tolocal, pycompat.json_loads(raw))
        parsed_ok = True
    except ValueError:
        ui.warn(_(b"invalid JSON in %s\n") % wdirvfs.join(b".arcconfig"))
    except IOError:
        # no .arcconfig in the working directory: nothing to load
        pass

    overrides = util.sortdict()
    # translate the relevant .arcconfig keys into [phabricator] settings
    for arckey, confkey in [
        (b"repository.callsign", b"callsign"),
        (b"phabricator.uri", b"url"),
    ]:
        if arckey in arcparams:
            overrides[(b"phabricator", confkey)] = arcparams[arckey]

    if overrides:
        ui.applyconfig(overrides, source=wdirvfs.join(b".arcconfig"))

    # Load .hg/hgrc via the wrapped implementation
    return orig(ui, wdirvfs, hgvfs, requirements, *args, **opts) or parsed_ok
247
248
248
249
def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
    """registrar.command() wrapper adding VCR record/replay support.

    Commands declared through this decorator gain the extra flags from
    ``_VCR_FLAGS``.  When the hidden ``--test-vcr`` flag is passed, HTTP
    traffic is recorded to (or replayed from) the named cassette file via
    the third-party ``vcr`` package; otherwise the command runs unmodified.
    """
    fullflags = flags + _VCR_FLAGS

    def hgmatcher(r1, r2):
        # Custom vcr request matcher: url and method must be equal, and every
        # body parameter of the incoming request must match the recorded one.
        if r1.uri != r2.uri or r1.method != r2.method:
            return False
        r1params = util.urlreq.parseqs(r1.body)
        r2params = util.urlreq.parseqs(r2.body)
        for key in r1params:
            if key not in r2params:
                return False
            value = r1params[key][0]
            # we want to compare json payloads without worrying about ordering
            if value.startswith(b'{') and value.endswith(b'}'):
                r1json = pycompat.json_loads(value)
                r2json = pycompat.json_loads(r2params[key][0])
                if r1json != r2json:
                    return False
            elif r2params[key][0] != value:
                return False
        return True

    def sanitiserequest(request):
        # Scrub "cli-..." values (presumably conduit API tokens -- the
        # pattern matches their shape) so recorded cassettes don't leak
        # credentials.
        request.body = re.sub(
            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
        )
        return request

    def sanitiseresponse(response):
        # Drop session cookies from recorded responses for the same reason.
        if 'set-cookie' in response['headers']:
            del response['headers']['set-cookie']
        return response

    def decorate(fn):
        def inner(*args, **kwargs):
            vcr = kwargs.pop('test_vcr')
            if vcr:
                cassette = pycompat.fsdecode(vcr)
                import hgdemandimport

                # vcr does attribute-level monkeypatching that does not play
                # well with Mercurial's lazy importer, hence the deactivation.
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs

                    vcr = vcrmod.VCR(
                        serializer='json',
                        before_record_request=sanitiserequest,
                        before_record_response=sanitiseresponse,
                        # Patch Mercurial's own url module so its HTTP(S)
                        # connection classes go through vcr's stubs.
                        custom_patches=[
                            (
                                urlmod,
                                'httpconnection',
                                stubs.VCRHTTPConnection,
                            ),
                            (
                                urlmod,
                                'httpsconnection',
                                stubs.VCRHTTPSConnection,
                            ),
                        ],
                    )
                    vcr.register_matcher('hgmatcher', hgmatcher)
                    with vcr.use_cassette(cassette, match_on=['hgmatcher']):
                        return fn(*args, **kwargs)
            # No cassette requested: run the command directly.
            return fn(*args, **kwargs)

        # depth=2 so checksignature reports errors against the real command,
        # not this wrapper; copy identity so help/docs still work.
        cmd = util.checksignature(inner, depth=2)
        cmd.__name__ = fn.__name__
        cmd.__doc__ = fn.__doc__

        return command(
            name,
            fullflags,
            spec,
            helpcategory=helpcategory,
            optionalrepo=optionalrepo,
        )(cmd)

    return decorate
328
329
329
330
330 def _debug(ui, *msg, **opts):
331 def _debug(ui, *msg, **opts):
331 """write debug output for Phabricator if ``phabricator.debug`` is set
332 """write debug output for Phabricator if ``phabricator.debug`` is set
332
333
333 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
334 Specifically, this avoids dumping Conduit and HTTP auth chatter that is
334 printed with the --debug argument.
335 printed with the --debug argument.
335 """
336 """
336 if ui.configbool(b"phabricator", b"debug"):
337 if ui.configbool(b"phabricator", b"debug"):
337 flag = ui.debugflag
338 flag = ui.debugflag
338 try:
339 try:
339 ui.debugflag = True
340 ui.debugflag = True
340 ui.write(*msg, **opts)
341 ui.write(*msg, **opts)
341 finally:
342 finally:
342 ui.debugflag = flag
343 ui.debugflag = flag
343
344
344
345
def urlencodenested(params):
    """Encode *params* like urlencode, but supporting nested containers.

    For example {'a': ['b', 'c'], 'd': {'e': 'f'}} is first flattened to
    {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then urlencoded.  The
    encoding is consistent with PHP's http_build_query.
    """
    flat = util.sortdict()

    def _flatten(prefix, value):
        # Booleans become PHP-style literal strings before dispatch.
        if isinstance(value, bool):
            value = b'true' if value else b'false'
        # Exact-type dispatch (not isinstance) mirrors PHP's behaviour and
        # keeps e.g. bytes subclasses treated as plain leaves.
        if type(value) is list:
            children = [(b'%d' % idx, item) for idx, item in enumerate(value)]
        elif type(value) is dict:
            children = value.items()
        else:
            flat[prefix] = value
            return
        for key, item in children:
            childprefix = b'%s[%s]' % (prefix, key) if prefix else key
            _flatten(childprefix, item)

    _flatten(b'', params)
    return urlutil.urlreq.urlencode(flat)
370
371
371
372
def readurltoken(ui):
    """Return the conduit (url, token) pair, aborting if either is missing.

    The url comes from the [phabricator] config section and the token from
    the matching [auth] group.  In the future it might make sense to read
    from .arcconfig and .arcrc as well.
    """
    url = ui.config(b'phabricator', b'url')
    if not url:
        msg = _(b'config %s.%s is required') % (b'phabricator', b'url')
        raise error.Abort(msg)

    user = urlutil.url(url).user
    token = None
    res = httpconnectionmod.readauthforuri(ui, url, user)
    if res is not None:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(
            _(b'Can\'t find conduit token associated to %s') % (url,)
        )

    return url, token
400
401
401
402
def callconduit(ui, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None

    ``name`` is the Conduit method (e.g. ``differential.querydiffs``) and
    ``params`` its arguments; they are flattened with urlencodenested()
    before being POSTed.  The request is retried up to ``phabricator.retry``
    times on transport errors.  A Conduit-level error in the decoded
    response raises ``error.Abort``.
    """
    host, token = readurltoken(ui)
    # Consistency fix: the url class now lives in urlutil (this same change
    # already switched readurltoken() above to urlutil.url).
    url, authinfo = urlutil.url(b'/'.join([host, b'api', name])).authinfo()
    ui.debug(b'Conduit Call: %s %s\n' % (url, pycompat.byterepr(params)))
    params = params.copy()
    params[b'__conduit__'] = {
        b'token': token,
    }
    rawdata = {
        b'params': templatefilters.json(params),
        b'output': b'json',
        b'__conduit__': 1,
    }
    data = urlencodenested(rawdata)
    curlcmd = ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Optional escape hatch: shell out to curl, feeding the payload on
        # stdin, instead of going through urllib.
        sin, sout = procutil.popen2(
            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
        )
        sin.write(data)
        sin.close()
        body = sout.read()
    else:
        urlopener = urlmod.opener(ui, authinfo)
        request = util.urlreq.request(pycompat.strurl(url), data=data)
        max_try = ui.configint(b'phabricator', b'retry') + 1
        timeout = ui.configwith(float, b'http', b'timeout')
        for try_count in range(max_try):
            try:
                with contextlib.closing(
                    urlopener.open(request, timeout=timeout)
                ) as rsp:
                    body = rsp.read()
                break
            except util.urlerr.urlerror as err:
                # Re-raise on the last attempt, otherwise log and retry.
                if try_count == max_try - 1:
                    raise
                ui.debug(
                    b'Conduit Request failed (try %d/%d): %r\n'
                    % (try_count + 1, max_try, err)
                )
                # failing request might come from overloaded server
                retry_interval = ui.configint(b'phabricator', b'retry.interval')
                time.sleep(retry_interval)
    ui.debug(b'Conduit Response: %s\n' % body)
    # Decode the JSON body back into local (bytes) strings.
    parsed = pycompat.rapply(
        lambda x: encoding.unitolocal(x)
        if isinstance(x, pycompat.unicode)
        else x,
        # json.loads only accepts bytes from py3.6+
        pycompat.json_loads(encoding.unifromlocal(body)),
    )
    if parsed.get(b'error_code'):
        msg = _(b'Conduit Error (%s): %s') % (
            parsed[b'error_code'],
            parsed[b'error_info'],
        )
        raise error.Abort(msg)
    return parsed[b'result']
462
463
463
464
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """

    def tolocal(x):
        # json.loads only returns unicode strings
        return encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x

    def fromlocal(x):
        # json.dumps only accepts unicode strings
        return encoding.unifromlocal(x) if isinstance(x, bytes) else x

    # json.loads only accepts bytes from 3.6+
    rawparams = encoding.unifromlocal(ui.fin.read())
    params = pycompat.rapply(tolocal, pycompat.json_loads(rawparams))
    result = pycompat.rapply(fromlocal, callconduit(ui, name, params))
    rendered = json.dumps(
        result, sort_keys=True, indent=2, separators=(u',', u': ')
    )
    ui.write(b'%s\n' % encoding.unitolocal(rendered))
487
488
488
489
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    cached = ui.config(b'phabricator', b'repophid')
    if cached:
        return cached
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    constraints = {b'constraints': {b'callsigns': [callsign]}}
    query = callconduit(ui, b'diffusion.repository.search', constraints)
    matches = query[b'data']
    if not matches:
        return None
    phid = matches[0][b'phid']
    # Remember the answer so later lookups in this process skip the query.
    ui.setconfig(b'phabricator', b'repophid', phid)
    return phid
508
509
509
510
# Matches a whole local tag of the form "D123" (no leading zeros); group 1
# is the numeric Differential Revision ID.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: ...D123" line anywhere in a commit
# message (re.M); named groups: "url" (full link text) and numeric "id".
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
)
514
515
515
516
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    has_node = unfi.changelog.index.has_node

    result = {}  # {node: (oldnode?, lastdiff?, drev)}
    # ordered for test stability when printing new -> old mapping below
    toconfirm = util.sortdict()  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # For tags like "D123", put them into "toconfirm" to verify later
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for n in precnodes:
            if has_node(n):
                for tag in unfi.nodetags(n):
                    m = _differentialrevisiontagre.match(tag)
                    if m:
                        # force=0: the tag alone is not trusted; it must be
                        # confirmed against Phabricator below.
                        toconfirm[node] = (0, set(precnodes), int(m.group(1)))
                        break
                else:
                    continue  # move to next predecessor
                break  # found a tag, stop
        else:
            # No predecessor carried a D-tag (outer for/else).
            # Check commit message
            m = _differentialrevisiondescre.search(ctx.description())
            if m:
                # force=1: the commit message is considered authoritative.
                toconfirm[node] = (1, set(precnodes), int(m.group('id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(
            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
        )

        def getnodes(d, precset):
            # Ignore other nodes that were combined into the Differential
            # that aren't predecessors of the current local node.
            return [n for n in getlocalcommits(d) if n in precset]

        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [
                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
            ]

            # local predecessors known by Phabricator
            phprecset = {n for d in diffs for n in getnodes(d, precset)}

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not phprecset:
                tagname = b'D%d' % drev
                # Tagging nullid locally is how a local tag is removed.
                tags.tag(
                    repo,
                    tagname,
                    nullid,
                    message=None,
                    user=None,
                    date=None,
                    local=True,
                )
                unfi.ui.warn(
                    _(
                        b'D%d: local tag removed - does not match '
                        b'Differential history\n'
                    )
                    % drev
                )
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                # Highest diff id == most recently uploaded diff.
                lastdiff = max(diffs, key=lambda d: int(d[b'id']))
                oldnodes = getnodes(lastdiff, precset)

                _debug(
                    unfi.ui,
                    b"%s mapped to old nodes %s\n"
                    % (
                        short(newnode),
                        stringutil.pprint([short(n) for n in sorted(oldnodes)]),
                    ),
                )

                # If this commit was the result of `hg fold` after submission,
                # and now resubmitted with --fold, the easiest thing to do is
                # to leave the node clear. This only results in creating a new
                # diff for the _same_ Differential Revision if this commit is
                # the first or last in the selected range. If we picked a node
                # from the list instead, it would have to be the lowest if at
                # the beginning of the --fold range, or the highest at the end.
                # Otherwise, one or more of the nodes wouldn't be considered in
                # the diff, and the Differential wouldn't be properly updated.
                # If this commit is the result of `hg split` in the same
                # scenario, there is a single oldnode here (and multiple
                # newnodes mapped to it). That makes it the same as the normal
                # case, as the edges of the newnode range cleanly maps to one
                # oldnode each.
                if len(oldnodes) == 1:
                    oldnode = oldnodes[0]
                if oldnode and not has_node(oldnode):
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
641
642
642
643
def getdrevmap(repo, revs):
    """Return a dict mapping each rev in `revs` to their Differential Revision
    ID or None.
    """

    def _drevfor(rev):
        ctx = repo[rev]
        # A "Differential Revision:" trailer in the commit message wins.
        m = _differentialrevisiondescre.search(ctx.description())
        if m:
            return int(m.group('id'))
        # Otherwise fall back to a local "D123" tag, if any.
        for tag in repo.nodetags(ctx.node()):
            m = _differentialrevisiontagre.match(tag)
            if m:
                return int(m.group(1))
        return None

    return {rev: _drevfor(rev) for rev in revs}
664
665
665
666
def getdiff(basectx, ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    # diffui yields (chunk, label) pairs; the ui labels are irrelevant here,
    # only the raw diff bytes are collected.
    chunks = patch.diffui(
        ctx.repo(), basectx.p1().node(), ctx.node(), None, opts=diffopts
    )
    return b''.join(chunk for chunk, _label in chunks)
674
675
675
676
class DiffChangeType(object):
    """Integer codes describing what kind of change a phabchange is.

    NOTE(review): these look like Phabricator's own differential
    change-type constants -- confirm against the upstream API before
    renumbering anything.
    """

    ADD = 1
    CHANGE = 2
    DELETE = 3
    MOVE_AWAY = 4
    COPY_AWAY = 5
    MOVE_HERE = 6
    COPY_HERE = 7
    MULTICOPY = 8
685
686
686
687
class DiffFileType(object):
    """Integer codes for the content type of a file in a phabchange.

    NOTE(review): presumably mirrors Phabricator's file-type constants --
    confirm against the upstream API before changing values.
    """

    TEXT = 1
    IMAGE = 2
    BINARY = 3
691
692
692
693
@attr.s
class phabhunk(dict):
    """Represents a Differential hunk, which is owned by a Differential change

    The camelCase attribute names are marked "camelcase-required" because,
    per those markers, the names themselves matter (they are kept as-is
    rather than following the usual all-lowercase convention).
    """

    oldOffset = attr.ib(default=0)  # camelcase-required
    oldLength = attr.ib(default=0)  # camelcase-required
    newOffset = attr.ib(default=0)  # camelcase-required
    newLength = attr.ib(default=0)  # camelcase-required
    # The hunk body itself; presumably the diff lines -- populated elsewhere.
    corpus = attr.ib(default='')
    # These get added to the phabchange's equivalents
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
705
706
706
707
@attr.s
class phabchange(object):
    """Represents a Differential change, owns Differential hunks and owned by a
    Differential diff. Each one represents one file in a diff.
    """

    currentPath = attr.ib(default=None)  # camelcase-required
    oldPath = attr.ib(default=None)  # camelcase-required
    awayPaths = attr.ib(default=attr.Factory(list))  # camelcase-required
    metadata = attr.ib(default=attr.Factory(dict))
    oldProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    newProperties = attr.ib(default=attr.Factory(dict))  # camelcase-required
    type = attr.ib(default=DiffChangeType.CHANGE)
    fileType = attr.ib(default=DiffFileType.TEXT)  # camelcase-required
    commitHash = attr.ib(default=None)  # camelcase-required
    addLines = attr.ib(default=0)  # camelcase-required
    delLines = attr.ib(default=0)  # camelcase-required
    hunks = attr.ib(default=attr.Factory(list))

    def copynewmetadatatoold(self):
        """Duplicate every ``new:``-prefixed metadata key under ``old:``."""
        # Iterate over a snapshot of the keys since the dict is mutated.
        for key in list(self.metadata.keys()):
            newkey = key.replace(b'new:', b'old:')
            self.metadata[newkey] = self.metadata[key]

    def addoldmode(self, value):
        """Record the pre-change unix file mode."""
        self.oldProperties[b'unix:filemode'] = value

    def addnewmode(self, value):
        """Record the post-change unix file mode."""
        self.newProperties[b'unix:filemode'] = value

    def addhunk(self, hunk):
        """Append a phabhunk (as a dict) and accumulate its line counts.

        Raises error.Abort for anything that is not a phabhunk.
        """
        if not isinstance(hunk, phabhunk):
            raise error.Abort(b'phabchange.addhunk only takes phabhunks')
        self.hunks.append(pycompat.byteskwargs(attr.asdict(hunk)))
        # It's useful to include these stats since the Phab web UI shows them,
        # and uses them to estimate how large a change a Revision is. Also used
        # in email subjects for the [+++--] bit.
        self.addLines += hunk.addLines
        self.delLines += hunk.delLines
746
747
747
748
@attr.s
class phabdiff(object):
    """Represents a Differential diff, owns Differential changes. Corresponds
    to a commit.

    Per the "camelcase-required" markers, the camelCase attribute names are
    significant and must be preserved as-is.
    """

    # Doesn't seem to be any reason to send this (output of uname -n)
    sourceMachine = attr.ib(default=b'')  # camelcase-required
    sourcePath = attr.ib(default=b'/')  # camelcase-required
    sourceControlBaseRevision = attr.ib(default=b'0' * 40)  # camelcase-required
    sourceControlPath = attr.ib(default=b'/')  # camelcase-required
    sourceControlSystem = attr.ib(default=b'hg')  # camelcase-required
    branch = attr.ib(default=b'default')
    bookmark = attr.ib(default=None)
    creationMethod = attr.ib(default=b'phabsend')  # camelcase-required
    lintStatus = attr.ib(default=b'none')  # camelcase-required
    unitStatus = attr.ib(default=b'none')  # camelcase-required
    # {currentPath: asdict(phabchange)}, filled in via addchange()
    changes = attr.ib(default=attr.Factory(dict))
    repositoryPHID = attr.ib(default=None)  # camelcase-required

    def addchange(self, change):
        """Register a phabchange, keyed by its currentPath.

        Raises error.Abort for anything that is not a phabchange.
        """
        if not isinstance(change, phabchange):
            raise error.Abort(b'phabdiff.addchange only takes phabchanges')
        self.changes[change.currentPath] = pycompat.byteskwargs(
            attr.asdict(change)
        )
774
775
775
776
def maketext(pchange, basectx, ctx, fname):
    """populate the phabchange for a text file"""
    repo = ctx.repo()
    fmatcher = match.exact([fname])
    # huge context value so each file's diff comes out as one big hunk
    diffopts = mdiff.diffopts(git=True, context=32767)
    _pfctx, _fctx, header, fhunks = next(
        patch.diffhunks(repo, basectx.p1(), ctx, fmatcher, opts=diffopts)
    )

    for fhunk in fhunks:
        (oldOffset, oldLength, newOffset, newLength), lines = fhunk
        # drop the leading "@@ ..." line; Phabricator wants the raw hunk body
        corpus = b''.join(lines[1:])
        # rebuild a full patch (header + hunk) so diffstat can count lines
        shunk = list(header)
        shunk.extend(lines)
        _mf, _mt, addLines, delLines, _hb = patch.diffstatsum(
            patch.diffstatdata(util.iterlines(shunk))
        )
        pchange.addhunk(
            phabhunk(
                oldOffset,
                oldLength,
                newOffset,
                newLength,
                corpus,
                addLines,
                delLines,
            )
        )
804
805
805
806
def uploadchunks(fctx, fphid):
    """upload large binary files as separate chunks.
    Phab requests chunking over 8MiB, and splits into 4MiB chunks
    """
    ui = fctx.repo().ui
    # ask the server which chunks it still needs for this file PHID
    chunks = callconduit(ui, b'file.querychunks', {b'filePHID': fphid})
    with ui.makeprogress(
        _(b'uploading file chunks'), unit=_(b'chunks'), total=len(chunks)
    ) as progress:
        for chunk in chunks:
            progress.increment()
            if chunk[b'complete']:
                # server already has this chunk; skip re-uploading it
                continue
            bstart = int(chunk[b'byteStart'])
            bend = int(chunk[b'byteEnd'])
            callconduit(
                ui,
                b'file.uploadchunk',
                {
                    b'filePHID': fphid,
                    b'byteStart': bstart,
                    b'data': base64.b64encode(fctx.data()[bstart:bend]),
                    b'dataEncoding': b'base64',
                },
            )
831
832
832
833
def uploadfile(fctx):
    """upload binary files to Phabricator

    Returns the PHID of the uploaded file, or raises ``error.Abort`` if
    no PHID could be obtained.
    """
    repo = fctx.repo()
    ui = repo.ui
    fname = fctx.path()
    size = fctx.size()
    fhash = pycompat.bytestr(hashlib.sha256(fctx.data()).hexdigest())

    # an allocate call is required first to see if an upload is even required
    # (Phab might already have it) and to determine if chunking is needed
    allocateparams = {
        b'name': fname,
        b'contentLength': size,
        b'contentHash': fhash,
    }
    filealloc = callconduit(ui, b'file.allocate', allocateparams)
    fphid = filealloc[b'filePHID']

    if filealloc[b'upload']:
        ui.write(_(b'uploading %s\n') % bytes(fctx))
        if not fphid:
            # no PHID yet: small file, single-shot base64 upload
            uploadparams = {
                b'name': fname,
                b'data_base64': base64.b64encode(fctx.data()),
            }
            fphid = callconduit(ui, b'file.upload', uploadparams)
        else:
            # PHID allocated up front: server wants a chunked upload
            uploadchunks(fctx, fphid)
    else:
        ui.debug(b'server already has %s\n' % bytes(fctx))

    if not fphid:
        raise error.Abort(b'Upload of %s failed.' % bytes(fctx))

    return fphid
868
869
869
870
def addoldbinary(pchange, oldfctx, fctx):
    """add the metadata for the previous version of a binary file to the
    phabchange for the new version

    ``oldfctx`` is the previous version of the file; ``fctx`` is the new
    version of the file, or None if the file is being removed.
    """
    if not fctx or fctx.cmp(oldfctx):
        # Files differ, add the old one
        pchange.metadata[b'old:file:size'] = oldfctx.size()
        mimeguess, _enc = mimetypes.guess_type(
            encoding.unifromlocal(oldfctx.path())
        )
        if mimeguess:
            pchange.metadata[b'old:file:mime-type'] = pycompat.bytestr(
                mimeguess
            )
        fphid = uploadfile(oldfctx)
        pchange.metadata[b'old:binary-phid'] = fphid
    else:
        # If it's left as IMAGE/BINARY web UI might try to display it
        pchange.fileType = DiffFileType.TEXT
        pchange.copynewmetadatatoold()
893
894
894
895
def makebinary(pchange, fctx):
    """populate the phabchange for a binary file"""
    pchange.fileType = DiffFileType.BINARY
    fphid = uploadfile(fctx)
    pchange.metadata[b'new:binary-phid'] = fphid
    pchange.metadata[b'new:file:size'] = fctx.size()
    mimeguess, _enc = mimetypes.guess_type(encoding.unifromlocal(fctx.path()))
    if mimeguess:
        mimeguess = pycompat.bytestr(mimeguess)
        pchange.metadata[b'new:file:mime-type'] = mimeguess
        # images get a dedicated type so the web UI renders them inline
        if mimeguess.startswith(b'image/'):
            pchange.fileType = DiffFileType.IMAGE
907
908
908
909
# Copied from mercurial/patch.py
# Map an hg file flag (b'l' symlink, b'x' executable, b'' regular) to the
# corresponding git mode string.
gitmode = {b'': b'100644', b'x': b'100755', b'l': b'120000'}
911
912
912
913
def notutf8(fctx):
    """detect non-UTF-8 text files since Phabricator requires them to be marked
    as binary
    """
    data = fctx.data()
    try:
        data.decode('utf-8')
    except UnicodeDecodeError:
        # tell the user we silently reclassified the file
        fctx.repo().ui.write(
            _(b'file %s detected as non-UTF-8, marked as binary\n')
            % fctx.path()
        )
        return True
    return False
926
927
927
928
def addremoved(pdiff, basectx, ctx, removed):
    """add removed files to the phabdiff. Shouldn't include moves"""
    for fname in removed:
        pchange = phabchange(
            currentPath=fname, oldPath=fname, type=DiffChangeType.DELETE
        )
        oldfctx = basectx.p1()[fname]
        pchange.addoldmode(gitmode[oldfctx.flags()])
        # only text files get hunks; binary/non-UTF-8 deletions carry no diff
        if not (oldfctx.isbinary() or notutf8(oldfctx)):
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
940
941
941
942
def addmodified(pdiff, basectx, ctx, modified):
    """add modified files to the phabdiff"""
    for fname in modified:
        fctx = ctx[fname]
        oldfctx = basectx.p1()[fname]
        pchange = phabchange(currentPath=fname, oldPath=fname)
        filemode = gitmode[fctx.flags()]
        originalmode = gitmode[oldfctx.flags()]
        # only record modes when they actually changed
        if filemode != originalmode:
            pchange.addoldmode(originalmode)
            pchange.addnewmode(filemode)

        # either side being binary (or undecodable) forces a binary change
        if (
            fctx.isbinary()
            or notutf8(fctx)
            or oldfctx.isbinary()
            or notutf8(oldfctx)
        ):
            makebinary(pchange, fctx)
            addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)
966
967
967
968
def addadded(pdiff, basectx, ctx, added, removed):
    """add file adds to the phabdiff, both new files and copies/moves

    NOTE: mutates ``removed`` in place — the source of a detected move is
    removed from it so addremoved() won't report it again.
    """
    # Keep track of files that've been recorded as moved/copied, so if there are
    # additional copies we can mark them (moves get removed from removed)
    copiedchanges = {}
    movedchanges = {}

    copy = {}
    if basectx != ctx:
        copy = copies.pathcopies(basectx.p1(), ctx)

    for fname in added:
        fctx = ctx[fname]
        oldfctx = None
        pchange = phabchange(currentPath=fname)

        filemode = gitmode[fctx.flags()]

        # figure out where this file came from (copy/rename source, if any)
        if copy:
            originalfname = copy.get(fname, fname)
        else:
            originalfname = fname
            if fctx.renamed():
                originalfname = fctx.renamed()[0]

        renamed = fname != originalfname

        if renamed:
            oldfctx = basectx.p1()[originalfname]
            originalmode = gitmode[oldfctx.flags()]
            pchange.oldPath = originalfname

            if originalfname in removed:
                # source was deleted too -> this is a move, not a copy
                origpchange = phabchange(
                    currentPath=originalfname,
                    oldPath=originalfname,
                    type=DiffChangeType.MOVE_AWAY,
                    awayPaths=[fname],
                )
                movedchanges[originalfname] = origpchange
                removed.remove(originalfname)
                pchange.type = DiffChangeType.MOVE_HERE
            elif originalfname in movedchanges:
                # a second destination for an already-recorded move
                movedchanges[originalfname].type = DiffChangeType.MULTICOPY
                movedchanges[originalfname].awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE
            else:  # pure copy
                if originalfname not in copiedchanges:
                    origpchange = phabchange(
                        currentPath=originalfname, type=DiffChangeType.COPY_AWAY
                    )
                    copiedchanges[originalfname] = origpchange
                else:
                    origpchange = copiedchanges[originalfname]
                origpchange.awayPaths.append(fname)
                pchange.type = DiffChangeType.COPY_HERE

            if filemode != originalmode:
                pchange.addoldmode(originalmode)
                pchange.addnewmode(filemode)
        else:  # Brand-new file
            pchange.addnewmode(gitmode[fctx.flags()])
            pchange.type = DiffChangeType.ADD

        if (
            fctx.isbinary()
            or notutf8(fctx)
            or (oldfctx and (oldfctx.isbinary() or notutf8(oldfctx)))
        ):
            makebinary(pchange, fctx)
            if renamed:
                addoldbinary(pchange, oldfctx, fctx)
        else:
            maketext(pchange, basectx, ctx, fname)

        pdiff.addchange(pchange)

    # flush the deferred source-side changes for copies and moves
    for _path, copiedchange in copiedchanges.items():
        pdiff.addchange(copiedchange)
    for _path, movedchange in movedchanges.items():
        pdiff.addchange(movedchange)
1049
1050
1050
1051
def creatediff(basectx, ctx):
    """create a Differential Diff

    Returns the diff dict from Conduit, or raises ``error.Abort`` when the
    server returned nothing.
    """
    repo = ctx.repo()
    repophid = getrepophid(repo)
    # Create a "Differential Diff" via "differential.creatediff" API
    pdiff = phabdiff(
        sourceControlBaseRevision=b'%s' % basectx.p1().hex(),
        branch=b'%s' % ctx.branch(),
    )
    modified, added, removed, _d, _u, _i, _c = basectx.p1().status(ctx)
    # addadded will remove moved files from removed, so addremoved won't get
    # them
    addadded(pdiff, basectx, ctx, added, removed)
    addmodified(pdiff, basectx, ctx, modified)
    addremoved(pdiff, basectx, ctx, removed)
    if repophid:
        pdiff.repositoryPHID = repophid
    diff = callconduit(
        repo.ui,
        b'differential.creatediff',
        pycompat.byteskwargs(attr.asdict(pdiff)),
    )
    if not diff:
        if basectx != ctx:
            msg = _(b'cannot create diff for %s::%s') % (basectx, ctx)
        else:
            msg = _(b'cannot create diff for %s') % ctx
        raise error.Abort(msg)
    return diff
1080
1081
1081
1082
def writediffproperties(ctxs, diff):
    """write metadata to diff so patches could be applied losslessly

    ``ctxs`` is the list of commits that created the diff, in ascending order.
    The list is generally a single commit, but may be several when using
    ``phabsend --fold``.
    """
    # creatediff returns with a diffid but query returns with an id
    diffid = diff.get(b'diffid', diff.get(b'id'))
    basectx = ctxs[0]
    tipctx = ctxs[-1]

    # "hg:meta": single blob describing the tip commit of the diff
    params = {
        b'diff_id': diffid,
        b'name': b'hg:meta',
        b'data': templatefilters.json(
            {
                b'user': tipctx.user(),
                b'date': b'%d %d' % tipctx.date(),
                b'branch': tipctx.branch(),
                b'node': tipctx.hex(),
                b'parent': basectx.p1().hex(),
            }
        ),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)

    # "local:commits": per-commit metadata for every commit in the fold range
    commits = {}
    for ctx in ctxs:
        commits[ctx.hex()] = {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': int(ctx.date()[0]),
            b'commit': ctx.hex(),
            b'parents': [ctx.p1().hex()],
            b'branch': ctx.branch(),
        }
    params = {
        b'diff_id': diffid,
        b'name': b'local:commits',
        b'data': templatefilters.json(commits),
    }
    callconduit(basectx.repo().ui, b'differential.setdiffproperty', params)
1125
1126
1126
1127
def createdifferentialrevision(
    ctxs,
    revid=None,
    parentrevphid=None,
    oldbasenode=None,
    oldnode=None,
    olddiff=None,
    actions=None,
    comment=None,
):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevphid is not None, set it as a dependency.

    If there is a single commit for the new Differential Revision, ``ctxs`` will
    be a list of that single context. Otherwise, it is a list that covers the
    range of changes for the differential, where ``ctxs[0]`` is the first change
    to include and ``ctxs[-1]`` is the last.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff. For a Revision with
    a single commit, ``oldbasenode`` and ``oldnode`` have the same value. For a
    Revision covering multiple commits, ``oldbasenode`` corresponds to
    ``ctxs[0]`` the previous time this Revision was posted, and ``oldnode``
    corresponds to ``ctxs[-1]``.

    If actions is not None, they will be appended to the transaction.

    Returns a ``(revision, diff)`` tuple; raises ``error.Abort`` when the
    server returned no revision.
    """
    ctx = ctxs[-1]
    basectx = ctxs[0]

    repo = ctx.repo()
    if oldnode:
        # compare full-context patches to decide whether a new diff is needed
        diffopts = mdiff.diffopts(git=True, context=32767)
        unfi = repo.unfiltered()
        oldctx = unfi[oldnode]
        oldbasectx = unfi[oldbasenode]
        neednewdiff = getdiff(basectx, ctx, diffopts) != getdiff(
            oldbasectx, oldctx, diffopts
        )
    else:
        neednewdiff = True

    transactions = []
    if neednewdiff:
        diff = creatediff(basectx, ctx)
        transactions.append({b'type': b'update', b'value': diff[b'phid']})
        if comment:
            transactions.append({b'type': b'comment', b'value': comment})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctxs, diff)

    # Set the parent Revision every time, so commit re-ordering is picked-up
    if parentrevphid:
        transactions.append(
            {b'type': b'parents.set', b'value': [parentrevphid]}
        )

    if actions:
        transactions += actions

    # When folding multiple local commits into a single review, arcanist will
    # take the summary line of the first commit as the title, and then
    # concatenate the rest of the remaining messages (including each of their
    # first lines) to the rest of the first commit message (each separated by
    # an empty line), and use that as the summary field. Do the same here.
    # For commits with only a one line message, there is no summary field, as
    # this gets assigned to the title.
    fields = util.sortdict()  # sorted for stable wire protocol in tests

    for i, _ctx in enumerate(ctxs):
        # Parse commit message and update related fields.
        desc = _ctx.description()
        info = callconduit(
            repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
        )

        for k in [b'title', b'summary', b'testPlan']:
            v = info[b'fields'].get(k)
            if not v:
                continue

            if i == 0:
                # Title, summary and test plan (if present) are taken verbatim
                # for the first commit.
                fields[k] = v.rstrip()
                continue
            elif k == b'title':
                # Add subsequent titles (i.e. the first line of the commit
                # message) back to the summary.
                k = b'summary'

            # Append any current field to the existing composite field
            fields[k] = b'\n\n'.join(filter(None, [fields.get(k), v.rstrip()]))

    for k, v in fields.items():
        transactions.append({b'type': k, b'value': v})

    params = {b'transactions': transactions}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo.ui, b'differential.revision.edit', params)
    if not revision:
        if len(ctxs) == 1:
            msg = _(b'cannot create revision for %s') % ctx
        else:
            msg = _(b'cannot create revision for %s::%s') % (basectx, ctx)
        raise error.Abort(msg)

    return revision, diff
1245
1246
1246
1247
def userphids(ui, names):
    """convert user names to PHIDs

    Raises ``error.Abort`` if any name cannot be resolved.
    """
    lowered = [name.lower() for name in names]
    result = callconduit(
        ui, b'user.search', {b'constraints': {b'usernames': lowered}}
    )
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[b'data']
    found = {entry[b'fields'][b'username'].lower() for entry in data}
    missing = set(lowered) - found
    if missing:
        raise error.Abort(
            _(b'unknown username: %s') % b' '.join(sorted(missing))
        )
    return [entry[b'phid'] for entry in data]
1262
1263
1263
1264
def _print_phabsend_action(ui, ctx, newrevid, action):
    """print the ``action`` that occurred when posting ``ctx`` for review

    This is a utility function for the sending phase of ``phabsend``, which
    makes it easier to show a status for all local commits with `--fold``.
    """
    # Translate the internal action keyword into a localized, labeled string.
    labels = {
        b'created': _(b'created'),
        b'skipped': _(b'skipped'),
        b'updated': _(b'updated'),
    }
    actiondesc = ui.label(labels[action], b'phabricator.action.%s' % action)
    drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
1281
1282
1282
1283
def _amend_diff_properties(unfi, drevid, newnodes, diff):
    """update the local commit list for the ``diff`` associated with ``drevid``

    This is a utility function for the amend phase of ``phabsend``, which
    converts failures to warning messages.
    """
    shortnodes = [short(n) for n in newnodes]
    _debug(unfi.ui, b"new commits: %s\n" % stringutil.pprint(shortnodes))

    ctxs = [unfi[node] for node in newnodes]
    try:
        writediffproperties(ctxs, diff)
    except util.urlerr.urlerror:
        # A failed metadata update is not fatal: warn and continue so the
        # DREV associations for the remaining commits are not lost.
        unfi.ui.warnnoi18n(b'Failed to update metadata for D%d\n' % drevid)
1300
1301
1301
1302
@vcrcommand(
    b'phabsend',
    [
        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
        (b'', b'amend', True, _(b'update commit messages')),
        (b'', b'reviewer', [], _(b'specify reviewers')),
        (b'', b'blocker', [], _(b'specify blocking reviewers')),
        (
            b'm',
            b'comment',
            b'',
            _(b'add a comment to Revisions with new/updated Diffs'),
        ),
        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
        (b'', b'fold', False, _(b'combine the revisions into one review')),
    ],
    _(b'REV [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabsend(ui, repo, *revs, **opts):
    """upload changesets to Phabricator

    If there are multiple revisions specified, they will be send as a stack
    with a linear dependencies relationship using the order specified by the
    revset.

    For the first time uploading changesets, local tags will be created to
    maintain the association. After the first time, phabsend will check
    obsstore and tags information so it can figure out whether to update an
    existing Differential Revision, or create a new one.

    If --amend is set, update commit messages so they have the
    ``Differential Revision`` URL, remove related tags. This is similar to what
    arcanist will do, and is more desired in author-push workflows. Otherwise,
    use local tags to record the ``Differential Revision`` association.

    The --confirm option lets you confirm changesets before sending them. You
    can also add following to your configuration file to make it default
    behaviour::

        [phabsend]
        confirm = true

    By default, a separate review will be created for each commit that is
    selected, and will have the same parent/child relationship in Phabricator.
    If ``--fold`` is set, multiple commits are rolled up into a single review
    as if diffed from the parent of the first revision to the last. The commit
    messages are concatenated in the summary field on Phabricator.

    phabsend will check obsstore and the above association to decide whether to
    update an existing Differential Revision, or create a new one.
    """
    opts = pycompat.byteskwargs(opts)
    revs = list(revs) + opts.get(b'rev', [])
    revs = scmutil.revrange(repo, revs)
    revs.sort()  # ascending order to preserve topological parent/child in phab

    if not revs:
        raise error.Abort(_(b'phabsend requires at least one changeset'))
    if opts.get(b'amend'):
        cmdutil.checkunfinished(repo)

    ctxs = [repo[rev] for rev in revs]

    if any(c for c in ctxs if c.obsolete()):
        raise error.Abort(_(b"obsolete commits cannot be posted for review"))

    # Ensure the local commits are an unbroken range.  The semantics of the
    # --fold option implies this, and the auto restacking of orphans requires
    # it.  Otherwise A+C in A->B->C will cause B to be orphaned, and C' to
    # get A' as a parent.
    def _fail_nonlinear_revs(revs, revtype):
        # Abort with a summary of the offending nodes.
        badnodes = [repo[r].node() for r in revs]
        raise error.Abort(
            _(b"cannot phabsend multiple %s revisions: %s")
            % (revtype, scmutil.nodesummaries(repo, badnodes)),
            hint=_(b"the revisions must form a linear chain"),
        )

    heads = repo.revs(b'heads(%ld)', revs)
    if len(heads) > 1:
        _fail_nonlinear_revs(heads, b"head")

    roots = repo.revs(b'roots(%ld)', revs)
    if len(roots) > 1:
        _fail_nonlinear_revs(roots, b"root")

    fold = opts.get(b'fold')
    if fold:
        if len(revs) == 1:
            # TODO: just switch to --no-fold instead?
            raise error.Abort(_(b"cannot fold a single revision"))

        # There's no clear way to manage multiple commits with a Dxxx tag, so
        # require the amend option.  (We could append "_nnn", but then it
        # becomes jumbled if earlier commits are added to an update.)  It should
        # lock the repo and ensure that the range is editable, but that would
        # make the code pretty convoluted.  The default behavior of `arc` is to
        # create a new review anyway.
        if not opts.get(b"amend"):
            raise error.Abort(_(b"cannot fold with --no-amend"))

        # It might be possible to bucketize the revisions by the DREV value,
        # and iterate over those groups when posting, and then again when
        # amending.  But for simplicity, require all selected revisions to be
        # for the same DREV (if present).  Adding local revisions to an
        # existing DREV is acceptable.
        drevmatchers = [
            _differentialrevisiondescre.search(ctx.description())
            for ctx in ctxs
        ]
        if len({m.group('url') for m in drevmatchers if m}) > 1:
            raise error.Abort(
                _(b"cannot fold revisions with different DREV values")
            )

    # {newnode: (oldnode, olddiff, olddrev}
    oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])

    confirm = ui.configbool(b'phabsend', b'confirm')
    confirm |= bool(opts.get(b'confirm'))
    if confirm:
        confirmed = _confirmbeforesend(repo, revs, oldmap)
        if not confirmed:
            raise error.Abort(_(b'phabsend cancelled'))

    actions = []
    reviewers = opts.get(b'reviewer', [])
    blockers = opts.get(b'blocker', [])
    phids = []
    if reviewers:
        phids.extend(userphids(repo.ui, reviewers))
    if blockers:
        phids.extend(
            map(
                lambda phid: b'blocking(%s)' % phid,
                userphids(repo.ui, blockers),
            )
        )
    if phids:
        actions.append({b'type': b'reviewers.add', b'value': phids})

    drevids = []  # [int]
    diffmap = {}  # {newnode: diff}

    # Send patches one by one so we know their Differential Revision PHIDs and
    # can provide dependency relationship
    lastrevphid = None
    for ctx in ctxs:
        if fold:
            ui.debug(b'sending rev %d::%d\n' % (ctx.rev(), ctxs[-1].rev()))
        else:
            ui.debug(b'sending rev %d\n' % ctx.rev())

        # Get Differential Revision ID
        oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
        oldbasenode, oldbasediff, oldbaserevid = oldnode, olddiff, revid

        if fold:
            oldbasenode, oldbasediff, oldbaserevid = oldmap.get(
                ctxs[-1].node(), (None, None, None)
            )

        if oldnode != ctx.node() or opts.get(b'amend'):
            # Create or update Differential Revision
            revision, diff = createdifferentialrevision(
                ctxs if fold else [ctx],
                revid,
                lastrevphid,
                oldbasenode,
                oldnode,
                olddiff,
                actions,
                opts.get(b'comment'),
            )

            if fold:
                for ctx in ctxs:
                    diffmap[ctx.node()] = diff
            else:
                diffmap[ctx.node()] = diff

            newrevid = int(revision[b'object'][b'id'])
            newrevphid = revision[b'object'][b'phid']
            if revid:
                action = b'updated'
            else:
                action = b'created'

            # Create a local tag to note the association, if commit message
            # does not have it already
            if not fold:
                m = _differentialrevisiondescre.search(ctx.description())
                if not m or int(m.group('id')) != newrevid:
                    tagname = b'D%d' % newrevid
                    tags.tag(
                        repo,
                        tagname,
                        ctx.node(),
                        message=None,
                        user=None,
                        date=None,
                        local=True,
                    )
        else:
            # Nothing changed. But still set "newrevphid" so the next revision
            # could depend on this one and "newrevid" for the summary line.
            newrevphid = querydrev(repo.ui, b'%d' % revid)[0][b'phid']
            newrevid = revid
            action = b'skipped'

        drevids.append(newrevid)
        lastrevphid = newrevphid

        if fold:
            # With --fold there is a single review; report a status line for
            # every selected commit, then stop iterating.
            for c in ctxs:
                if oldmap.get(c.node(), (None, None, None))[2]:
                    action = b'updated'
                else:
                    action = b'created'
                _print_phabsend_action(ui, c, newrevid, action)
            break

        _print_phabsend_action(ui, ctx, newrevid, action)

    # Update commit messages and remove tags
    if opts.get(b'amend'):
        unfi = repo.unfiltered()
        drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
        with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
            # Eagerly evaluate commits to restabilize before creating new
            # commits.  The selected revisions are excluded because they are
            # automatically restacked as part of the submission process.
            restack = [
                c
                for c in repo.set(
                    b"(%ld::) - (%ld) - unstable() - obsolete() - public()",
                    revs,
                    revs,
                )
            ]
            wnode = unfi[b'.'].node()
            mapping = {}  # {oldnode: [newnode]}
            newnodes = []

            drevid = drevids[0]

            for i, rev in enumerate(revs):
                old = unfi[rev]
                if not fold:
                    drevid = drevids[i]
                drev = [d for d in drevs if int(d[b'id']) == drevid][0]

                newdesc = get_amended_desc(drev, old, fold)
                # Make sure commit message contain "Differential Revision"
                if (
                    old.description() != newdesc
                    or old.p1().node() in mapping
                    or old.p2().node() in mapping
                ):
                    if old.phase() == phases.public:
                        ui.warn(
                            _(b"warning: not updating public commit %s\n")
                            % scmutil.formatchangeid(old)
                        )
                        continue
                    parents = [
                        mapping.get(old.p1().node(), (old.p1(),))[0],
                        mapping.get(old.p2().node(), (old.p2(),))[0],
                    ]
                    newdesc = rewriteutil.update_hash_refs(
                        repo,
                        newdesc,
                        mapping,
                    )
                    new = context.metadataonlyctx(
                        repo,
                        old,
                        parents=parents,
                        text=newdesc,
                        user=old.user(),
                        date=old.date(),
                        extra=old.extra(),
                    )

                    newnode = new.commit()

                    mapping[old.node()] = [newnode]

                    if fold:
                        # Defer updating the (single) Diff until all nodes are
                        # collected.  No tags were created, so none need to be
                        # removed.
                        newnodes.append(newnode)
                        continue

                    _amend_diff_properties(
                        unfi, drevid, [newnode], diffmap[old.node()]
                    )

                    # Remove local tags since it's no longer necessary
                    tagname = b'D%d' % drevid
                    if tagname in repo.tags():
                        tags.tag(
                            repo,
                            tagname,
                            nullid,
                            message=None,
                            user=None,
                            date=None,
                            local=True,
                        )
                elif fold:
                    # When folding multiple commits into one review with
                    # --fold, track even the commits that weren't amended, so
                    # that their association isn't lost if the properties are
                    # rewritten below.
                    newnodes.append(old.node())

            # If the submitted commits are public, no amend takes place so
            # there are no newnodes and therefore no diff update to do.
            if fold and newnodes:
                diff = diffmap[old.node()]

                # The diff object in diffmap doesn't have the local commits
                # because that could be returned from differential.creatediff,
                # not differential.querydiffs.  So use the queried diff (if
                # present), or force the amend (a new revision is being
                # posted.)
                if not olddiff or set(newnodes) != getlocalcommits(olddiff):
                    _debug(ui, b"updating local commit list for D%d\n" % drevid)
                    _amend_diff_properties(unfi, drevid, newnodes, diff)
                else:
                    _debug(
                        ui,
                        b"local commit list for D%d is already up-to-date\n"
                        % drevid,
                    )
            elif fold:
                _debug(ui, b"no newnodes to update\n")

            # Restack any children of first-time submissions that were orphaned
            # in the process.  The ctx won't report that it is an orphan until
            # the cleanup takes place below.
            for old in restack:
                parents = [
                    mapping.get(old.p1().node(), (old.p1(),))[0],
                    mapping.get(old.p2().node(), (old.p2(),))[0],
                ]
                new = context.metadataonlyctx(
                    repo,
                    old,
                    parents=parents,
                    text=rewriteutil.update_hash_refs(
                        repo, old.description(), mapping
                    ),
                    user=old.user(),
                    date=old.date(),
                    extra=old.extra(),
                )

                newnode = new.commit()

                # Don't obsolete unselected descendants of nodes that have not
                # been changed in this transaction- that results in an error.
                if newnode != old.node():
                    mapping[old.node()] = [newnode]
                    _debug(
                        ui,
                        b"restabilizing %s as %s\n"
                        % (short(old.node()), short(newnode)),
                    )
                else:
                    _debug(
                        ui,
                        b"not restabilizing unchanged %s\n" % short(old.node()),
                    )

        scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
        if wnode in mapping:
            unfi.setparents(mapping[wnode][0])
1682
1683
1683
1684
# Map from "hg:meta" keys to header understood by "hg import".  The insertion
# order below matches the header order of "hg export" output.
_metanamemap = util.sortdict()
_metanamemap[b'user'] = b'User'
_metanamemap[b'date'] = b'Date'
_metanamemap[b'branch'] = b'Branch'
_metanamemap[b'node'] = b'Node ID'
_metanamemap[b'parent'] = b'Parent '
1695
1696
1696
1697
def _confirmbeforesend(repo, revs, oldmap):
    """Show a summary of each revision and ask whether to send.

    Returns True when the user confirms, False otherwise.
    """
    url, token = readurltoken(repo.ui)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        # Third element of the oldmap tuple is the existing DREV id, if any.
        drevid = oldmap.get(ctx.node(), (None, None, None))[2]
        drevdesc = ui.label(
            b'D%d' % drevid if drevid else _(b'NEW'), b'phabricator.drev'
        )
        summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
        ui.write(_(b'%s - %s\n') % (drevdesc, summary))

    # promptchoice() returns 0 for "Yes" (the default), so confirmation
    # succeeds exactly when the answer index is falsy.
    prompt = _(b'Send the above changes to %s (Y/n)?$$ &Yes $$ &No') % url
    return not ui.promptchoice(prompt)
1722
1723
1723
1724
# Normalized Differential Revision status names recognized by the revset
# language (compare with _getstatusname()).
_knownstatusnames = set(
    b'accepted needsreview needsrevision closed abandoned '
    b'changesplanned'.split()
)
1732
1733
1733
1734
def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    # e.g. b'Needs Review' -> b'needsreview': strip spaces, lowercase.
    name = drev[b'statusName']
    return name.replace(b' ', b'').lower()
1737
1738
1738
1739
# Small language to specify differential revisions.  Support symbols: (), :X,
# +, and -.

_elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
1753
1754
1754
1755
def _tokenize(text):
    """Yield (token-type, value, position) tuples for a DREVSPEC string.

    Runs of ordinary bytes become ``symbol`` tokens; each special character
    is emitted as its own token type; spaces are skipped. A trailing
    ``end`` token marks exhaustion of the input.
    """
    specials = b'():+-& '
    buf = memoryview(text)  # zero-copy slicing of the input
    idx = 0
    total = len(text)
    while idx < total:
        # Grab the longest run of non-special characters starting at idx.
        word = b''.join(
            itertools.takewhile(
                lambda ch: ch not in specials, pycompat.iterbytestr(buf[idx:])
            )
        )
        if not word:
            # Single special character; spaces are separators, not tokens.
            ch = text[idx : idx + 1]
            if ch != b' ':
                yield (ch, None, idx)
            idx += 1
        else:
            yield (b'symbol', word, idx)
            idx += len(word)
    yield (b'end', None, idx)
1774
1775
1775
1776
def _parse(text):
    """Parse a DREVSPEC string into an AST, rejecting trailing garbage."""
    grammar = parser.parser(_elements)
    tree, consumed = grammar.parse(_tokenize(text))
    if consumed != len(text):
        # The parser stopped before consuming the whole input.
        raise error.ParseError(b'invalid token', consumed)
    return tree
1781
1782
1782
1783
1783 def _parsedrev(symbol):
1784 def _parsedrev(symbol):
1784 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1785 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
1785 if symbol.startswith(b'D') and symbol[1:].isdigit():
1786 if symbol.startswith(b'D') and symbol[1:].isdigit():
1786 return int(symbol[1:])
1787 return int(symbol[1:])
1787 if symbol.isdigit():
1788 if symbol.isdigit():
1788 return int(symbol)
1789 return int(symbol)
1789
1790
1790
1791
def _prefetchdrevs(tree):
    """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
    singles = set()
    ancestors = set()
    kind = tree[0]
    if kind == b'symbol':
        rev = _parsedrev(tree[1])
        if rev:
            singles.add(rev)
    elif kind == b'ancestors':
        # ``:X``: X is fetched itself and also roots an ancestor walk.
        sub, subanc = _prefetchdrevs(tree[1])
        singles |= sub
        ancestors |= sub | subanc
    else:
        # Operators (and_/add/sub/group): union the needs of each operand.
        for child in tree[1:]:
            sub, subanc = _prefetchdrevs(child)
            singles |= sub
            ancestors |= subanc
    return singles, ancestors
1811
1812
1812
1813
def querydrev(ui, spec):
    """return a list of "Differential Revision" dicts

    spec is a string using a simple query language, see docstring in phabread
    for details.

    A "Differential Revision dict" looks like:

        {
            "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
            "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
            "auxiliary": {
              "phabricator:depends-on": [
                "PHID-DREV-gbapp366kutjebt7agcd"
              ]
              "phabricator:projects": [],
            },
            "branch": "default",
            "ccs": [],
            "commits": [],
            "dateCreated": "1499181406",
            "dateModified": "1499182103",
            "diffs": [
              "3",
              "4",
            ],
            "hashes": [],
            "id": "2",
            "lineCount": "2",
            "phid": "PHID-DREV-672qvysjcczopag46qty",
            "properties": {},
            "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
            "reviewers": [],
            "sourcePath": null
            "status": "0",
            "statusName": "Needs Review",
            "summary": "",
            "testPlan": "",
            "title": "example",
            "uri": "https://phab.example.com/D2",
        }
    """
    # TODO: replace differential.query and differential.querydiffs with
    # differential.diff.search because the former (and their output) are
    # frozen, and planned to be deprecated and removed.

    def fetch(params):
        """params -> single drev or None"""
        # key is either an integer drev id (b'ids') or a PHID (b'phids').
        key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
        if key in prefetched:
            return prefetched[key]
        drevs = callconduit(ui, b'differential.query', params)
        # Fill prefetched with the result; each drev is cached under both
        # its PHID and its integer id so later lookups hit either way.
        for drev in drevs:
            prefetched[drev[b'phid']] = drev
            prefetched[int(drev[b'id'])] = drev
        if key not in prefetched:
            raise error.Abort(
                _(b'cannot get Differential Revision %r') % params
            )
        return prefetched[key]

    def getstack(topdrevids):
        """given a top, get a stack from the bottom, [id] -> [id]"""
        visited = set()
        result = []
        queue = [{b'ids': [i]} for i in topdrevids]
        while queue:
            params = queue.pop()
            drev = fetch(params)
            if drev[b'id'] in visited:
                continue
            visited.add(drev[b'id'])
            result.append(int(drev[b'id']))
            # Follow "depends-on" edges downwards to the bottom of the stack.
            auxiliary = drev.get(b'auxiliary', {})
            depends = auxiliary.get(b'phabricator:depends-on', [])
            for phid in depends:
                queue.append({b'phids': [phid]})
        # Reverse so the result runs bottom (oldest dependency) to top.
        result.reverse()
        return smartset.baseset(result)

    # Initialize prefetch cache
    prefetched = {}  # {id or phid: drev}

    tree = _parse(spec)
    drevs, ancestordrevs = _prefetchdrevs(tree)

    # developer config: phabricator.batchsize
    batchsize = ui.configint(b'phabricator', b'batchsize')

    # Prefetch Differential Revisions in batch; for each ancestor-walk root,
    # optimistically fetch up to batchsize preceding ids in the same call.
    tofetch = set(drevs)
    for r in ancestordrevs:
        tofetch.update(range(max(1, r - batchsize), r + 1))
    if drevs:
        fetch({b'ids': list(tofetch)})
    validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))

    # Walk through the tree, return smartsets
    def walk(tree):
        op = tree[0]
        if op == b'symbol':
            drev = _parsedrev(tree[1])
            if drev:
                return smartset.baseset([drev])
            elif tree[1] in _knownstatusnames:
                # Status names only filter within already-selected ids.
                drevs = [
                    r
                    for r in validids
                    if _getstatusname(prefetched[r]) == tree[1]
                ]
                return smartset.baseset(drevs)
            else:
                raise error.Abort(_(b'unknown symbol: %s') % tree[1])
        elif op in {b'and_', b'add', b'sub'}:
            assert len(tree) == 3
            # Dispatch to operator.and_/add/sub on the two sub-results.
            return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
        elif op == b'group':
            return walk(tree[1])
        elif op == b'ancestors':
            return getstack(walk(tree[1]))
        else:
            raise error.ProgrammingError(b'illegal tree: %r' % tree)

    return [prefetched[r] for r in walk(tree)]
1938
1939
1939
1940
def getdescfromdrev(drev):
    """get description (commit message) from "Differential Revision"

    This is similar to differential.getcommitmessage API. But we only care
    about limited fields: title, summary, test plan, and URL.
    """
    sections = [
        drev[b'title'],
        drev[b'summary'].rstrip(),
    ]
    testplan = drev[b'testPlan'].rstrip()
    if testplan:
        sections.append(b'Test Plan:\n%s' % testplan)
    sections.append(b'Differential Revision: %s' % drev[b'uri'])
    # Empty sections (e.g. a blank summary) are dropped entirely.
    return b'\n\n'.join(part for part in sections if part)
1953
1954
1954
1955
def get_amended_desc(drev, ctx, folded):
    """similar to ``getdescfromdrev``, but supports a folded series of commits

    This is used when determining if an individual commit needs to have its
    message amended after posting it for review. The determination is made for
    each individual commit, even when they were folded into one review.
    """
    if not folded:
        return getdescfromdrev(drev)

    uri = b'Differential Revision: %s' % drev[b'uri']

    # Since the commit messages were combined when posting multiple commits
    # with --fold, the fields can't be read from Phabricator here, or *all*
    # affected local revisions will end up with the same commit message after
    # the URI is amended in. Append in the DREV line, or update it if it
    # exists. At worst, this means commit message or test plan updates on
    # Phabricator aren't propagated back to the repository, but that seems
    # reasonable for the case where local commits are effectively combined
    # in Phabricator.
    description = ctx.description()
    if _differentialrevisiondescre.search(description):
        return _differentialrevisiondescre.sub(uri, description)
    return b'\n\n'.join([description, uri])
1980
1981
1981
1982
def getlocalcommits(diff):
    """get the set of local commits from a diff object

    See ``getdiffmeta()`` for an example diff object.
    """
    properties = diff.get(b'properties') or {}
    localcommits = properties.get(b'local:commits') or {}
    if len(localcommits) > 1:
        # Folded review: the keys are the hex nodes of every local commit.
        return {bin(hexnode) for hexnode in localcommits}

    # Storing the diff metadata predates storing `local:commits`, so continue
    # to use that in the --no-fold case.
    return {bin(getdiffmeta(diff).get(b'node', b'')) or None}
1995
1996
1996
1997
def getdiffmeta(diff):
    """get commit metadata (date, node, user, p1) from a diff object

    The metadata could be "hg:meta", sent by phabsend, like:

        "properties": {
          "hg:meta": {
            "branch": "default",
            "date": "1499571514 25200",
            "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
            "user": "Foo Bar <foo@example.com>",
            "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
          }
        }

    Or converted from "local:commits", sent by "arc", like:

        "properties": {
          "local:commits": {
            "98c08acae292b2faf60a279b4189beb6cff1414d": {
              "author": "Foo Bar",
              "authorEmail": "foo@example.com"
              "branch": "default",
              "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "local": "1000",
              "message": "...",
              "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
              "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
              "summary": "...",
              "tag": "",
              "time": 1499546314,
            }
          }
        }

    Note: metadata extracted from "local:commits" will lose time zone
    information.
    """

    def _meta_from_local_commit(commit):
        # Rebuild an hg:meta-shaped dict from one "local:commits" entry.
        out = {}
        if b'author' in commit and b'authorEmail' in commit:
            out[b'user'] = b'%s <%s>' % (
                commit[b'author'],
                commit[b'authorEmail'],
            )
        if b'time' in commit:
            # Time zone information is lost here (always rendered as "+0000").
            out[b'date'] = b'%d 0' % int(commit[b'time'])
        if b'branch' in commit:
            out[b'branch'] = commit[b'branch']
        node = commit.get(b'commit', commit.get(b'rev'))
        if node:
            out[b'node'] = node
        if len(commit.get(b'parents', ())) >= 1:
            out[b'parent'] = commit[b'parents'][0]
        return out

    props = diff.get(b'properties') or {}
    meta = props.get(b'hg:meta')
    if not meta:
        localcommits = props.get(b'local:commits')
        if localcommits:
            # NOTE(review): sorting dict values raises TypeError on Python 3
            # when more than one entry is present; presumably a single entry
            # is expected here — confirm.
            commit = sorted(localcommits.values())[0]
            meta = _meta_from_local_commit(commit)
        else:
            meta = {}
    # Fall back to top-level diff fields for anything still missing.
    if b'date' not in meta and b'dateCreated' in diff:
        meta[b'date'] = b'%s 0' % diff[b'dateCreated']
    if b'branch' not in meta and diff.get(b'branch'):
        meta[b'branch'] = diff[b'branch']
    if b'parent' not in meta and diff.get(b'sourceControlBaseRevision'):
        meta[b'parent'] = diff[b'sourceControlBaseRevision']
    return meta
2064
2065
2065
2066
def _getdrevs(ui, stack, specs):
    """convert user supplied DREVSPECs into "Differential Revision" dicts

    See ``hg help phabread`` for how to specify each DREVSPEC.
    """
    if specs:

        def _wrap(one):
            # Parenthesize each spec; with --stack, prefix ``:`` to pull in
            # the whole dependency chain.
            inner = b':(%s)' % one if stack else one
            return b'(%s)' % inner

        combined = b'+'.join(pycompat.maplist(_wrap, specs))

        revisions = querydrev(ui, combined)
        if revisions:
            return revisions

    raise error.Abort(_(b"empty DREVSPEC set"))
2085
2086
2086
2087
def readpatch(ui, drevs, write):
    """generate plain-text patch readable by 'hg import'

    write takes a list of (DREV, bytes), where DREV is the differential number
    (as bytes, without the "D" prefix) and the bytes are the text of a patch
    to be imported. drevs is what "querydrev" returns, results of
    "differential.query".
    """
    # Prefetch hg:meta property for all diffs
    latestids = sorted(
        {max(int(v) for v in drev[b'diffs']) for drev in drevs}
    )
    alldiffs = callconduit(
        ui, b'differential.querydiffs', {b'ids': latestids}
    )

    patches = []

    # Generate patch for each drev
    for drev in drevs:
        ui.note(_(b'reading D%s\n') % drev[b'id'])

        diffid = max(int(v) for v in drev[b'diffs'])
        rawdiff = callconduit(
            ui, b'differential.getrawdiff', {b'diffID': diffid}
        )
        desc = getdescfromdrev(drev)

        # Try to preserve metadata from hg:meta property. Write hg patch
        # headers that can be read by the "import" command. See patchheadermap
        # and extract in mercurial/patch.py for supported headers.
        headerlines = [b'# HG changeset patch\n']
        meta = getdiffmeta(alldiffs[b'%d' % diffid])
        for field in _metanamemap.keys():
            if field in meta:
                headerlines.append(
                    b'# %s %s\n' % (_metanamemap[field], meta[field])
                )

        content = b'%s%s\n%s' % (b''.join(headerlines), desc, rawdiff)
        patches.append((drev[b'id'], content))

    # Write patches to the supplied callback
    write(patches)
2123
2124
2124
2125
@vcrcommand(
    b'phabread',
    [(b'', b'stack', False, _(b'read dependencies'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    optionalrepo=True,
)
def phabread(ui, repo, *specs, **opts):
    """print patches from Phabricator suitable for importing

    DREVSPEC could be a Differential Revision identity, like ``D123``, or just
    the number ``123``. It could also have common operators like ``+``, ``-``,
    ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
    select a stack. If multiple DREVSPEC values are given, the result is the
    union of each individually evaluated value. No attempt is currently made
    to reorder the values to run from parent to child.

    ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
    could be used to filter patches by status. For performance reason, they
    only represent a subset of non-status selections and cannot be used alone.

    For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
    D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
    stack up to D9.

    If --stack is given, follow dependencies information and read all patches.
    It is equivalent to the ``:`` operator.
    """
    opts = pycompat.byteskwargs(opts)
    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    def _emit(patches):
        # Dump every patch body straight to the ui; the drev ids are unused.
        for _drev, content in patches:
            ui.write(content)

    readpatch(ui, drevs, _emit)
2161
2162
2162
2163
@vcrcommand(
    b'phabimport',
    [(b'', b'stack', False, _(b'import dependencies as well'))],
    _(b'DREVSPEC... [OPTIONS]'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def phabimport(ui, repo, *specs, **opts):
    """import patches from Phabricator for the specified Differential Revisions

    The patches are read and applied starting at the parent of the working
    directory.

    See ``hg help phabread`` for how to specify DREVSPEC.
    """
    opts = pycompat.byteskwargs(opts)

    # --bypass avoids losing exec and symlink bits when importing on Windows,
    # and allows importing with a dirty wdir. It also aborts instead of leaving
    # rejects.
    opts[b'bypass'] = True

    # Mandatory default values, synced with commands.import
    opts[b'strip'] = 1
    opts[b'prefix'] = b''
    # Evolve 9.3.0 assumes this key is present in cmdutil.tryimportone()
    opts[b'obsolete'] = False

    if ui.configbool(b'phabimport', b'secret'):
        opts[b'secret'] = True
    if ui.configbool(b'phabimport', b'obsolete'):
        opts[b'obsolete'] = True  # Handled by evolve wrapping tryimportone()

    def _write(patches):
        # Callback handed to readpatch(): applies each patch in sequence
        # inside a single wlock/lock/transaction scope.
        parents = repo[None].parents()

        with repo.wlock(), repo.lock(), repo.transaction(b'phabimport'):
            for drev, contents in patches:
                ui.status(_(b'applying patch from D%s\n') % drev)

                with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
                    msg, node, rej = cmdutil.tryimportone(
                        ui,
                        repo,
                        patchdata,
                        parents,
                        opts,
                        [],
                        None,  # Never update wdir to another revision
                    )

                    if not node:
                        raise error.Abort(_(b'D%s: no diffs found') % drev)

                    ui.note(msg + b'\n')
                    # Chain the next patch onto the node just imported.
                    parents = [repo[node]]

    drevs = _getdrevs(ui, opts.get(b'stack'), specs)

    readpatch(repo.ui, drevs, _write)
2222
2223
2223
2224
2224 @vcrcommand(
2225 @vcrcommand(
2225 b'phabupdate',
2226 b'phabupdate',
2226 [
2227 [
2227 (b'', b'accept', False, _(b'accept revisions')),
2228 (b'', b'accept', False, _(b'accept revisions')),
2228 (b'', b'reject', False, _(b'reject revisions')),
2229 (b'', b'reject', False, _(b'reject revisions')),
2229 (b'', b'request-review', False, _(b'request review on revisions')),
2230 (b'', b'request-review', False, _(b'request review on revisions')),
2230 (b'', b'abandon', False, _(b'abandon revisions')),
2231 (b'', b'abandon', False, _(b'abandon revisions')),
2231 (b'', b'reclaim', False, _(b'reclaim revisions')),
2232 (b'', b'reclaim', False, _(b'reclaim revisions')),
2232 (b'', b'close', False, _(b'close revisions')),
2233 (b'', b'close', False, _(b'close revisions')),
2233 (b'', b'reopen', False, _(b'reopen revisions')),
2234 (b'', b'reopen', False, _(b'reopen revisions')),
2234 (b'', b'plan-changes', False, _(b'plan changes for revisions')),
2235 (b'', b'plan-changes', False, _(b'plan changes for revisions')),
2235 (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
2236 (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
2236 (b'', b'commandeer', False, _(b'commandeer revisions')),
2237 (b'', b'commandeer', False, _(b'commandeer revisions')),
2237 (b'm', b'comment', b'', _(b'comment on the last revision')),
2238 (b'm', b'comment', b'', _(b'comment on the last revision')),
2238 (b'r', b'rev', b'', _(b'local revision to update'), _(b'REV')),
2239 (b'r', b'rev', b'', _(b'local revision to update'), _(b'REV')),
2239 ],
2240 ],
2240 _(b'[DREVSPEC...| -r REV...] [OPTIONS]'),
2241 _(b'[DREVSPEC...| -r REV...] [OPTIONS]'),
2241 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2242 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2242 optionalrepo=True,
2243 optionalrepo=True,
2243 )
2244 )
2244 def phabupdate(ui, repo, *specs, **opts):
2245 def phabupdate(ui, repo, *specs, **opts):
2245 """update Differential Revision in batch
2246 """update Differential Revision in batch
2246
2247
2247 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2248 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
2248 """
2249 """
2249 opts = pycompat.byteskwargs(opts)
2250 opts = pycompat.byteskwargs(opts)
2250 transactions = [
2251 transactions = [
2251 b'abandon',
2252 b'abandon',
2252 b'accept',
2253 b'accept',
2253 b'close',
2254 b'close',
2254 b'commandeer',
2255 b'commandeer',
2255 b'plan-changes',
2256 b'plan-changes',
2256 b'reclaim',
2257 b'reclaim',
2257 b'reject',
2258 b'reject',
2258 b'reopen',
2259 b'reopen',
2259 b'request-review',
2260 b'request-review',
2260 b'resign',
2261 b'resign',
2261 ]
2262 ]
2262 flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
2263 flags = [n for n in transactions if opts.get(n.replace(b'-', b'_'))]
2263 if len(flags) > 1:
2264 if len(flags) > 1:
2264 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2265 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
2265
2266
2266 actions = []
2267 actions = []
2267 for f in flags:
2268 for f in flags:
2268 actions.append({b'type': f, b'value': True})
2269 actions.append({b'type': f, b'value': True})
2269
2270
2270 revs = opts.get(b'rev')
2271 revs = opts.get(b'rev')
2271 if revs:
2272 if revs:
2272 if not repo:
2273 if not repo:
2273 raise error.InputError(_(b'--rev requires a repository'))
2274 raise error.InputError(_(b'--rev requires a repository'))
2274
2275
2275 if specs:
2276 if specs:
2276 raise error.InputError(_(b'cannot specify both DREVSPEC and --rev'))
2277 raise error.InputError(_(b'cannot specify both DREVSPEC and --rev'))
2277
2278
2278 drevmap = getdrevmap(repo, scmutil.revrange(repo, [revs]))
2279 drevmap = getdrevmap(repo, scmutil.revrange(repo, [revs]))
2279 specs = []
2280 specs = []
2280 unknown = []
2281 unknown = []
2281 for r, d in pycompat.iteritems(drevmap):
2282 for r, d in pycompat.iteritems(drevmap):
2282 if d is None:
2283 if d is None:
2283 unknown.append(repo[r])
2284 unknown.append(repo[r])
2284 else:
2285 else:
2285 specs.append(b'D%d' % d)
2286 specs.append(b'D%d' % d)
2286 if unknown:
2287 if unknown:
2287 raise error.InputError(
2288 raise error.InputError(
2288 _(b'selected revisions without a Differential: %s')
2289 _(b'selected revisions without a Differential: %s')
2289 % scmutil.nodesummaries(repo, unknown)
2290 % scmutil.nodesummaries(repo, unknown)
2290 )
2291 )
2291
2292
2292 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2293 drevs = _getdrevs(ui, opts.get(b'stack'), specs)
2293 for i, drev in enumerate(drevs):
2294 for i, drev in enumerate(drevs):
2294 if i + 1 == len(drevs) and opts.get(b'comment'):
2295 if i + 1 == len(drevs) and opts.get(b'comment'):
2295 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2296 actions.append({b'type': b'comment', b'value': opts[b'comment']})
2296 if actions:
2297 if actions:
2297 params = {
2298 params = {
2298 b'objectIdentifier': drev[b'phid'],
2299 b'objectIdentifier': drev[b'phid'],
2299 b'transactions': actions,
2300 b'transactions': actions,
2300 }
2301 }
2301 callconduit(ui, b'differential.revision.edit', params)
2302 callconduit(ui, b'differential.revision.edit', params)
2302
2303
2303
2304
2304 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2305 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
2305 def template_review(context, mapping):
2306 def template_review(context, mapping):
2306 """:phabreview: Object describing the review for this changeset.
2307 """:phabreview: Object describing the review for this changeset.
2307 Has attributes `url` and `id`.
2308 Has attributes `url` and `id`.
2308 """
2309 """
2309 ctx = context.resource(mapping, b'ctx')
2310 ctx = context.resource(mapping, b'ctx')
2310 m = _differentialrevisiondescre.search(ctx.description())
2311 m = _differentialrevisiondescre.search(ctx.description())
2311 if m:
2312 if m:
2312 return templateutil.hybriddict(
2313 return templateutil.hybriddict(
2313 {
2314 {
2314 b'url': m.group('url'),
2315 b'url': m.group('url'),
2315 b'id': b"D%s" % m.group('id'),
2316 b'id': b"D%s" % m.group('id'),
2316 }
2317 }
2317 )
2318 )
2318 else:
2319 else:
2319 tags = ctx.repo().nodetags(ctx.node())
2320 tags = ctx.repo().nodetags(ctx.node())
2320 for t in tags:
2321 for t in tags:
2321 if _differentialrevisiontagre.match(t):
2322 if _differentialrevisiontagre.match(t):
2322 url = ctx.repo().ui.config(b'phabricator', b'url')
2323 url = ctx.repo().ui.config(b'phabricator', b'url')
2323 if not url.endswith(b'/'):
2324 if not url.endswith(b'/'):
2324 url += b'/'
2325 url += b'/'
2325 url += t
2326 url += t
2326
2327
2327 return templateutil.hybriddict(
2328 return templateutil.hybriddict(
2328 {
2329 {
2329 b'url': url,
2330 b'url': url,
2330 b'id': t,
2331 b'id': t,
2331 }
2332 }
2332 )
2333 )
2333 return None
2334 return None
2334
2335
2335
2336
2336 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2337 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2337 def template_status(context, mapping):
2338 def template_status(context, mapping):
2338 """:phabstatus: String. Status of Phabricator differential."""
2339 """:phabstatus: String. Status of Phabricator differential."""
2339 ctx = context.resource(mapping, b'ctx')
2340 ctx = context.resource(mapping, b'ctx')
2340 repo = context.resource(mapping, b'repo')
2341 repo = context.resource(mapping, b'repo')
2341 ui = context.resource(mapping, b'ui')
2342 ui = context.resource(mapping, b'ui')
2342
2343
2343 rev = ctx.rev()
2344 rev = ctx.rev()
2344 try:
2345 try:
2345 drevid = getdrevmap(repo, [rev])[rev]
2346 drevid = getdrevmap(repo, [rev])[rev]
2346 except KeyError:
2347 except KeyError:
2347 return None
2348 return None
2348 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2349 drevs = callconduit(ui, b'differential.query', {b'ids': [drevid]})
2349 for drev in drevs:
2350 for drev in drevs:
2350 if int(drev[b'id']) == drevid:
2351 if int(drev[b'id']) == drevid:
2351 return templateutil.hybriddict(
2352 return templateutil.hybriddict(
2352 {
2353 {
2353 b'url': drev[b'uri'],
2354 b'url': drev[b'uri'],
2354 b'status': drev[b'statusName'],
2355 b'status': drev[b'statusName'],
2355 }
2356 }
2356 )
2357 )
2357 return None
2358 return None
2358
2359
2359
2360
2360 @show.showview(b'phabstatus', csettopic=b'work')
2361 @show.showview(b'phabstatus', csettopic=b'work')
2361 def phabstatusshowview(ui, repo, displayer):
2362 def phabstatusshowview(ui, repo, displayer):
2362 """Phabricator differiential status"""
2363 """Phabricator differiential status"""
2363 revs = repo.revs('sort(_underway(), topo)')
2364 revs = repo.revs('sort(_underway(), topo)')
2364 drevmap = getdrevmap(repo, revs)
2365 drevmap = getdrevmap(repo, revs)
2365 unknownrevs, drevids, revsbydrevid = [], set(), {}
2366 unknownrevs, drevids, revsbydrevid = [], set(), {}
2366 for rev, drevid in pycompat.iteritems(drevmap):
2367 for rev, drevid in pycompat.iteritems(drevmap):
2367 if drevid is not None:
2368 if drevid is not None:
2368 drevids.add(drevid)
2369 drevids.add(drevid)
2369 revsbydrevid.setdefault(drevid, set()).add(rev)
2370 revsbydrevid.setdefault(drevid, set()).add(rev)
2370 else:
2371 else:
2371 unknownrevs.append(rev)
2372 unknownrevs.append(rev)
2372
2373
2373 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2374 drevs = callconduit(ui, b'differential.query', {b'ids': list(drevids)})
2374 drevsbyrev = {}
2375 drevsbyrev = {}
2375 for drev in drevs:
2376 for drev in drevs:
2376 for rev in revsbydrevid[int(drev[b'id'])]:
2377 for rev in revsbydrevid[int(drev[b'id'])]:
2377 drevsbyrev[rev] = drev
2378 drevsbyrev[rev] = drev
2378
2379
2379 def phabstatus(ctx):
2380 def phabstatus(ctx):
2380 drev = drevsbyrev[ctx.rev()]
2381 drev = drevsbyrev[ctx.rev()]
2381 status = ui.label(
2382 status = ui.label(
2382 b'%(statusName)s' % drev,
2383 b'%(statusName)s' % drev,
2383 b'phabricator.status.%s' % _getstatusname(drev),
2384 b'phabricator.status.%s' % _getstatusname(drev),
2384 )
2385 )
2385 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2386 ui.write(b"\n%s %s\n" % (drev[b'uri'], status))
2386
2387
2387 revs -= smartset.baseset(unknownrevs)
2388 revs -= smartset.baseset(unknownrevs)
2388 revdag = graphmod.dagwalker(repo, revs)
2389 revdag = graphmod.dagwalker(repo, revs)
2389
2390
2390 ui.setconfig(b'experimental', b'graphshorten', True)
2391 ui.setconfig(b'experimental', b'graphshorten', True)
2391 displayer._exthook = phabstatus
2392 displayer._exthook = phabstatus
2392 nodelen = show.longestshortest(repo, revs)
2393 nodelen = show.longestshortest(repo, revs)
2393 logcmdutil.displaygraph(
2394 logcmdutil.displaygraph(
2394 ui,
2395 ui,
2395 repo,
2396 repo,
2396 revdag,
2397 revdag,
2397 displayer,
2398 displayer,
2398 graphmod.asciiedges,
2399 graphmod.asciiedges,
2399 props={b'nodelen': nodelen},
2400 props={b'nodelen': nodelen},
2400 )
2401 )
@@ -1,149 +1,151 b''
1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """extend schemes with shortcuts to repository swarms
6 """extend schemes with shortcuts to repository swarms
7
7
8 This extension allows you to specify shortcuts for parent URLs with a
8 This extension allows you to specify shortcuts for parent URLs with a
9 lot of repositories to act like a scheme, for example::
9 lot of repositories to act like a scheme, for example::
10
10
11 [schemes]
11 [schemes]
12 py = http://code.python.org/hg/
12 py = http://code.python.org/hg/
13
13
14 After that you can use it like::
14 After that you can use it like::
15
15
16 hg clone py://trunk/
16 hg clone py://trunk/
17
17
18 Additionally there is support for some more complex schemas, for
18 Additionally there is support for some more complex schemas, for
19 example used by Google Code::
19 example used by Google Code::
20
20
21 [schemes]
21 [schemes]
22 gcode = http://{1}.googlecode.com/hg/
22 gcode = http://{1}.googlecode.com/hg/
23
23
24 The syntax is taken from Mercurial templates, and you have unlimited
24 The syntax is taken from Mercurial templates, and you have unlimited
25 number of variables, starting with ``{1}`` and continuing with
25 number of variables, starting with ``{1}`` and continuing with
26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
28 just appended to an URL.
28 just appended to an URL.
29
29
30 For convenience, the extension adds these schemes by default::
30 For convenience, the extension adds these schemes by default::
31
31
32 [schemes]
32 [schemes]
33 py = http://hg.python.org/
33 py = http://hg.python.org/
34 bb = https://bitbucket.org/
34 bb = https://bitbucket.org/
35 bb+ssh = ssh://hg@bitbucket.org/
35 bb+ssh = ssh://hg@bitbucket.org/
36 gcode = https://{1}.googlecode.com/hg/
36 gcode = https://{1}.googlecode.com/hg/
37 kiln = https://{1}.kilnhg.com/Repo/
37 kiln = https://{1}.kilnhg.com/Repo/
38
38
39 You can override a predefined scheme by defining a new scheme with the
39 You can override a predefined scheme by defining a new scheme with the
40 same name.
40 same name.
41 """
41 """
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import os
44 import os
45 import re
45 import re
46
46
47 from mercurial.i18n import _
47 from mercurial.i18n import _
48 from mercurial import (
48 from mercurial import (
49 error,
49 error,
50 extensions,
50 extensions,
51 hg,
51 hg,
52 pycompat,
52 pycompat,
53 registrar,
53 registrar,
54 templater,
54 templater,
55 util,
55 )
56 from mercurial.utils import (
57 urlutil,
56 )
58 )
57
59
58 cmdtable = {}
60 cmdtable = {}
59 command = registrar.command(cmdtable)
61 command = registrar.command(cmdtable)
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
62 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
63 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 # be specifying the version(s) of Mercurial they are tested with, or
64 # be specifying the version(s) of Mercurial they are tested with, or
63 # leave the attribute unspecified.
65 # leave the attribute unspecified.
64 testedwith = b'ships-with-hg-core'
66 testedwith = b'ships-with-hg-core'
65
67
66 _partre = re.compile(br'{(\d+)\}')
68 _partre = re.compile(br'{(\d+)\}')
67
69
68
70
69 class ShortRepository(object):
71 class ShortRepository(object):
70 def __init__(self, url, scheme, templater):
72 def __init__(self, url, scheme, templater):
71 self.scheme = scheme
73 self.scheme = scheme
72 self.templater = templater
74 self.templater = templater
73 self.url = url
75 self.url = url
74 try:
76 try:
75 self.parts = max(map(int, _partre.findall(self.url)))
77 self.parts = max(map(int, _partre.findall(self.url)))
76 except ValueError:
78 except ValueError:
77 self.parts = 0
79 self.parts = 0
78
80
79 def __repr__(self):
81 def __repr__(self):
80 return b'<ShortRepository: %s>' % self.scheme
82 return b'<ShortRepository: %s>' % self.scheme
81
83
82 def instance(self, ui, url, create, intents=None, createopts=None):
84 def instance(self, ui, url, create, intents=None, createopts=None):
83 url = self.resolve(url)
85 url = self.resolve(url)
84 return hg._peerlookup(url).instance(
86 return hg._peerlookup(url).instance(
85 ui, url, create, intents=intents, createopts=createopts
87 ui, url, create, intents=intents, createopts=createopts
86 )
88 )
87
89
88 def resolve(self, url):
90 def resolve(self, url):
89 # Should this use the util.url class, or is manual parsing better?
91 # Should this use the urlutil.url class, or is manual parsing better?
90 try:
92 try:
91 url = url.split(b'://', 1)[1]
93 url = url.split(b'://', 1)[1]
92 except IndexError:
94 except IndexError:
93 raise error.Abort(_(b"no '://' in scheme url '%s'") % url)
95 raise error.Abort(_(b"no '://' in scheme url '%s'") % url)
94 parts = url.split(b'/', self.parts)
96 parts = url.split(b'/', self.parts)
95 if len(parts) > self.parts:
97 if len(parts) > self.parts:
96 tail = parts[-1]
98 tail = parts[-1]
97 parts = parts[:-1]
99 parts = parts[:-1]
98 else:
100 else:
99 tail = b''
101 tail = b''
100 context = {b'%d' % (i + 1): v for i, v in enumerate(parts)}
102 context = {b'%d' % (i + 1): v for i, v in enumerate(parts)}
101 return b''.join(self.templater.process(self.url, context)) + tail
103 return b''.join(self.templater.process(self.url, context)) + tail
102
104
103
105
104 def hasdriveletter(orig, path):
106 def hasdriveletter(orig, path):
105 if path:
107 if path:
106 for scheme in schemes:
108 for scheme in schemes:
107 if path.startswith(scheme + b':'):
109 if path.startswith(scheme + b':'):
108 return False
110 return False
109 return orig(path)
111 return orig(path)
110
112
111
113
112 schemes = {
114 schemes = {
113 b'py': b'http://hg.python.org/',
115 b'py': b'http://hg.python.org/',
114 b'bb': b'https://bitbucket.org/',
116 b'bb': b'https://bitbucket.org/',
115 b'bb+ssh': b'ssh://hg@bitbucket.org/',
117 b'bb+ssh': b'ssh://hg@bitbucket.org/',
116 b'gcode': b'https://{1}.googlecode.com/hg/',
118 b'gcode': b'https://{1}.googlecode.com/hg/',
117 b'kiln': b'https://{1}.kilnhg.com/Repo/',
119 b'kiln': b'https://{1}.kilnhg.com/Repo/',
118 }
120 }
119
121
120
122
121 def extsetup(ui):
123 def extsetup(ui):
122 schemes.update(dict(ui.configitems(b'schemes')))
124 schemes.update(dict(ui.configitems(b'schemes')))
123 t = templater.engine(templater.parse)
125 t = templater.engine(templater.parse)
124 for scheme, url in schemes.items():
126 for scheme, url in schemes.items():
125 if (
127 if (
126 pycompat.iswindows
128 pycompat.iswindows
127 and len(scheme) == 1
129 and len(scheme) == 1
128 and scheme.isalpha()
130 and scheme.isalpha()
129 and os.path.exists(b'%s:\\' % scheme)
131 and os.path.exists(b'%s:\\' % scheme)
130 ):
132 ):
131 raise error.Abort(
133 raise error.Abort(
132 _(
134 _(
133 b'custom scheme %s:// conflicts with drive '
135 b'custom scheme %s:// conflicts with drive '
134 b'letter %s:\\\n'
136 b'letter %s:\\\n'
135 )
137 )
136 % (scheme, scheme.upper())
138 % (scheme, scheme.upper())
137 )
139 )
138 hg.schemes[scheme] = ShortRepository(url, scheme, t)
140 hg.schemes[scheme] = ShortRepository(url, scheme, t)
139
141
140 extensions.wrapfunction(util, b'hasdriveletter', hasdriveletter)
142 extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter)
141
143
142
144
143 @command(b'debugexpandscheme', norepo=True)
145 @command(b'debugexpandscheme', norepo=True)
144 def expandscheme(ui, url, **opts):
146 def expandscheme(ui, url, **opts):
145 """given a repo path, provide the scheme-expanded path"""
147 """given a repo path, provide the scheme-expanded path"""
146 repo = hg._peerlookup(url)
148 repo = hg._peerlookup(url)
147 if isinstance(repo, ShortRepository):
149 if isinstance(repo, ShortRepository):
148 url = repo.resolve(url)
150 url = repo.resolve(url)
149 ui.write(url + b'\n')
151 ui.write(url + b'\n')
@@ -1,1067 +1,1070 b''
1 # Mercurial bookmark support code
1 # Mercurial bookmark support code
2 #
2 #
3 # Copyright 2008 David Soria Parra <dsp@php.net>
3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import struct
11 import struct
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 bin,
15 bin,
16 hex,
16 hex,
17 short,
17 short,
18 wdirid,
18 wdirid,
19 )
19 )
20 from .pycompat import getattr
20 from .pycompat import getattr
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 obsutil,
24 obsutil,
25 pycompat,
25 pycompat,
26 scmutil,
26 scmutil,
27 txnutil,
27 txnutil,
28 util,
28 util,
29 )
29 )
30 from .utils import (
31 urlutil,
32 )
30
33
31 # label constants
34 # label constants
32 # until 3.5, bookmarks.current was the advertised name, not
35 # until 3.5, bookmarks.current was the advertised name, not
33 # bookmarks.active, so we must use both to avoid breaking old
36 # bookmarks.active, so we must use both to avoid breaking old
34 # custom styles
37 # custom styles
35 activebookmarklabel = b'bookmarks.active bookmarks.current'
38 activebookmarklabel = b'bookmarks.active bookmarks.current'
36
39
37 BOOKMARKS_IN_STORE_REQUIREMENT = b'bookmarksinstore'
40 BOOKMARKS_IN_STORE_REQUIREMENT = b'bookmarksinstore'
38
41
39
42
40 def bookmarksinstore(repo):
43 def bookmarksinstore(repo):
41 return BOOKMARKS_IN_STORE_REQUIREMENT in repo.requirements
44 return BOOKMARKS_IN_STORE_REQUIREMENT in repo.requirements
42
45
43
46
44 def bookmarksvfs(repo):
47 def bookmarksvfs(repo):
45 return repo.svfs if bookmarksinstore(repo) else repo.vfs
48 return repo.svfs if bookmarksinstore(repo) else repo.vfs
46
49
47
50
48 def _getbkfile(repo):
51 def _getbkfile(repo):
49 """Hook so that extensions that mess with the store can hook bm storage.
52 """Hook so that extensions that mess with the store can hook bm storage.
50
53
51 For core, this just handles wether we should see pending
54 For core, this just handles wether we should see pending
52 bookmarks or the committed ones. Other extensions (like share)
55 bookmarks or the committed ones. Other extensions (like share)
53 may need to tweak this behavior further.
56 may need to tweak this behavior further.
54 """
57 """
55 fp, pending = txnutil.trypending(
58 fp, pending = txnutil.trypending(
56 repo.root, bookmarksvfs(repo), b'bookmarks'
59 repo.root, bookmarksvfs(repo), b'bookmarks'
57 )
60 )
58 return fp
61 return fp
59
62
60
63
61 class bmstore(object):
64 class bmstore(object):
62 r"""Storage for bookmarks.
65 r"""Storage for bookmarks.
63
66
64 This object should do all bookmark-related reads and writes, so
67 This object should do all bookmark-related reads and writes, so
65 that it's fairly simple to replace the storage underlying
68 that it's fairly simple to replace the storage underlying
66 bookmarks without having to clone the logic surrounding
69 bookmarks without having to clone the logic surrounding
67 bookmarks. This type also should manage the active bookmark, if
70 bookmarks. This type also should manage the active bookmark, if
68 any.
71 any.
69
72
70 This particular bmstore implementation stores bookmarks as
73 This particular bmstore implementation stores bookmarks as
71 {hash}\s{name}\n (the same format as localtags) in
74 {hash}\s{name}\n (the same format as localtags) in
72 .hg/bookmarks. The mapping is stored as {name: nodeid}.
75 .hg/bookmarks. The mapping is stored as {name: nodeid}.
73 """
76 """
74
77
75 def __init__(self, repo):
78 def __init__(self, repo):
76 self._repo = repo
79 self._repo = repo
77 self._refmap = refmap = {} # refspec: node
80 self._refmap = refmap = {} # refspec: node
78 self._nodemap = nodemap = {} # node: sorted([refspec, ...])
81 self._nodemap = nodemap = {} # node: sorted([refspec, ...])
79 self._clean = True
82 self._clean = True
80 self._aclean = True
83 self._aclean = True
81 has_node = repo.changelog.index.has_node
84 has_node = repo.changelog.index.has_node
82 tonode = bin # force local lookup
85 tonode = bin # force local lookup
83 try:
86 try:
84 with _getbkfile(repo) as bkfile:
87 with _getbkfile(repo) as bkfile:
85 for line in bkfile:
88 for line in bkfile:
86 line = line.strip()
89 line = line.strip()
87 if not line:
90 if not line:
88 continue
91 continue
89 try:
92 try:
90 sha, refspec = line.split(b' ', 1)
93 sha, refspec = line.split(b' ', 1)
91 node = tonode(sha)
94 node = tonode(sha)
92 if has_node(node):
95 if has_node(node):
93 refspec = encoding.tolocal(refspec)
96 refspec = encoding.tolocal(refspec)
94 refmap[refspec] = node
97 refmap[refspec] = node
95 nrefs = nodemap.get(node)
98 nrefs = nodemap.get(node)
96 if nrefs is None:
99 if nrefs is None:
97 nodemap[node] = [refspec]
100 nodemap[node] = [refspec]
98 else:
101 else:
99 nrefs.append(refspec)
102 nrefs.append(refspec)
100 if nrefs[-2] > refspec:
103 if nrefs[-2] > refspec:
101 # bookmarks weren't sorted before 4.5
104 # bookmarks weren't sorted before 4.5
102 nrefs.sort()
105 nrefs.sort()
103 except (TypeError, ValueError):
106 except (TypeError, ValueError):
104 # TypeError:
107 # TypeError:
105 # - bin(...)
108 # - bin(...)
106 # ValueError:
109 # ValueError:
107 # - node in nm, for non-20-bytes entry
110 # - node in nm, for non-20-bytes entry
108 # - split(...), for string without ' '
111 # - split(...), for string without ' '
109 bookmarkspath = b'.hg/bookmarks'
112 bookmarkspath = b'.hg/bookmarks'
110 if bookmarksinstore(repo):
113 if bookmarksinstore(repo):
111 bookmarkspath = b'.hg/store/bookmarks'
114 bookmarkspath = b'.hg/store/bookmarks'
112 repo.ui.warn(
115 repo.ui.warn(
113 _(b'malformed line in %s: %r\n')
116 _(b'malformed line in %s: %r\n')
114 % (bookmarkspath, pycompat.bytestr(line))
117 % (bookmarkspath, pycompat.bytestr(line))
115 )
118 )
116 except IOError as inst:
119 except IOError as inst:
117 if inst.errno != errno.ENOENT:
120 if inst.errno != errno.ENOENT:
118 raise
121 raise
119 self._active = _readactive(repo, self)
122 self._active = _readactive(repo, self)
120
123
121 @property
124 @property
122 def active(self):
125 def active(self):
123 return self._active
126 return self._active
124
127
125 @active.setter
128 @active.setter
126 def active(self, mark):
129 def active(self, mark):
127 if mark is not None and mark not in self._refmap:
130 if mark is not None and mark not in self._refmap:
128 raise AssertionError(b'bookmark %s does not exist!' % mark)
131 raise AssertionError(b'bookmark %s does not exist!' % mark)
129
132
130 self._active = mark
133 self._active = mark
131 self._aclean = False
134 self._aclean = False
132
135
133 def __len__(self):
136 def __len__(self):
134 return len(self._refmap)
137 return len(self._refmap)
135
138
136 def __iter__(self):
139 def __iter__(self):
137 return iter(self._refmap)
140 return iter(self._refmap)
138
141
139 def iteritems(self):
142 def iteritems(self):
140 return pycompat.iteritems(self._refmap)
143 return pycompat.iteritems(self._refmap)
141
144
142 def items(self):
145 def items(self):
143 return self._refmap.items()
146 return self._refmap.items()
144
147
145 # TODO: maybe rename to allnames()?
148 # TODO: maybe rename to allnames()?
146 def keys(self):
149 def keys(self):
147 return self._refmap.keys()
150 return self._refmap.keys()
148
151
149 # TODO: maybe rename to allnodes()? but nodes would have to be deduplicated
152 # TODO: maybe rename to allnodes()? but nodes would have to be deduplicated
150 # could be self._nodemap.keys()
153 # could be self._nodemap.keys()
151 def values(self):
154 def values(self):
152 return self._refmap.values()
155 return self._refmap.values()
153
156
154 def __contains__(self, mark):
157 def __contains__(self, mark):
155 return mark in self._refmap
158 return mark in self._refmap
156
159
157 def __getitem__(self, mark):
160 def __getitem__(self, mark):
158 return self._refmap[mark]
161 return self._refmap[mark]
159
162
160 def get(self, mark, default=None):
163 def get(self, mark, default=None):
161 return self._refmap.get(mark, default)
164 return self._refmap.get(mark, default)
162
165
163 def _set(self, mark, node):
166 def _set(self, mark, node):
164 self._clean = False
167 self._clean = False
165 if mark in self._refmap:
168 if mark in self._refmap:
166 self._del(mark)
169 self._del(mark)
167 self._refmap[mark] = node
170 self._refmap[mark] = node
168 nrefs = self._nodemap.get(node)
171 nrefs = self._nodemap.get(node)
169 if nrefs is None:
172 if nrefs is None:
170 self._nodemap[node] = [mark]
173 self._nodemap[node] = [mark]
171 else:
174 else:
172 nrefs.append(mark)
175 nrefs.append(mark)
173 nrefs.sort()
176 nrefs.sort()
174
177
175 def _del(self, mark):
178 def _del(self, mark):
176 if mark not in self._refmap:
179 if mark not in self._refmap:
177 return
180 return
178 self._clean = False
181 self._clean = False
179 node = self._refmap.pop(mark)
182 node = self._refmap.pop(mark)
180 nrefs = self._nodemap[node]
183 nrefs = self._nodemap[node]
181 if len(nrefs) == 1:
184 if len(nrefs) == 1:
182 assert nrefs[0] == mark
185 assert nrefs[0] == mark
183 del self._nodemap[node]
186 del self._nodemap[node]
184 else:
187 else:
185 nrefs.remove(mark)
188 nrefs.remove(mark)
186
189
187 def names(self, node):
190 def names(self, node):
188 """Return a sorted list of bookmarks pointing to the specified node"""
191 """Return a sorted list of bookmarks pointing to the specified node"""
189 return self._nodemap.get(node, [])
192 return self._nodemap.get(node, [])
190
193
191 def applychanges(self, repo, tr, changes):
194 def applychanges(self, repo, tr, changes):
192 """Apply a list of changes to bookmarks"""
195 """Apply a list of changes to bookmarks"""
193 bmchanges = tr.changes.get(b'bookmarks')
196 bmchanges = tr.changes.get(b'bookmarks')
194 for name, node in changes:
197 for name, node in changes:
195 old = self._refmap.get(name)
198 old = self._refmap.get(name)
196 if node is None:
199 if node is None:
197 self._del(name)
200 self._del(name)
198 else:
201 else:
199 self._set(name, node)
202 self._set(name, node)
200 if bmchanges is not None:
203 if bmchanges is not None:
201 # if a previous value exist preserve the "initial" value
204 # if a previous value exist preserve the "initial" value
202 previous = bmchanges.get(name)
205 previous = bmchanges.get(name)
203 if previous is not None:
206 if previous is not None:
204 old = previous[0]
207 old = previous[0]
205 bmchanges[name] = (old, node)
208 bmchanges[name] = (old, node)
206 self._recordchange(tr)
209 self._recordchange(tr)
207
210
208 def _recordchange(self, tr):
211 def _recordchange(self, tr):
209 """record that bookmarks have been changed in a transaction
212 """record that bookmarks have been changed in a transaction
210
213
211 The transaction is then responsible for updating the file content."""
214 The transaction is then responsible for updating the file content."""
212 location = b'' if bookmarksinstore(self._repo) else b'plain'
215 location = b'' if bookmarksinstore(self._repo) else b'plain'
213 tr.addfilegenerator(
216 tr.addfilegenerator(
214 b'bookmarks', (b'bookmarks',), self._write, location=location
217 b'bookmarks', (b'bookmarks',), self._write, location=location
215 )
218 )
216 tr.hookargs[b'bookmark_moved'] = b'1'
219 tr.hookargs[b'bookmark_moved'] = b'1'
217
220
218 def _writerepo(self, repo):
221 def _writerepo(self, repo):
219 """Factored out for extensibility"""
222 """Factored out for extensibility"""
220 rbm = repo._bookmarks
223 rbm = repo._bookmarks
221 if rbm.active not in self._refmap:
224 if rbm.active not in self._refmap:
222 rbm.active = None
225 rbm.active = None
223 rbm._writeactive()
226 rbm._writeactive()
224
227
225 if bookmarksinstore(repo):
228 if bookmarksinstore(repo):
226 vfs = repo.svfs
229 vfs = repo.svfs
227 lock = repo.lock()
230 lock = repo.lock()
228 else:
231 else:
229 vfs = repo.vfs
232 vfs = repo.vfs
230 lock = repo.wlock()
233 lock = repo.wlock()
231 with lock:
234 with lock:
232 with vfs(b'bookmarks', b'w', atomictemp=True, checkambig=True) as f:
235 with vfs(b'bookmarks', b'w', atomictemp=True, checkambig=True) as f:
233 self._write(f)
236 self._write(f)
234
237
235 def _writeactive(self):
238 def _writeactive(self):
236 if self._aclean:
239 if self._aclean:
237 return
240 return
238 with self._repo.wlock():
241 with self._repo.wlock():
239 if self._active is not None:
242 if self._active is not None:
240 with self._repo.vfs(
243 with self._repo.vfs(
241 b'bookmarks.current', b'w', atomictemp=True, checkambig=True
244 b'bookmarks.current', b'w', atomictemp=True, checkambig=True
242 ) as f:
245 ) as f:
243 f.write(encoding.fromlocal(self._active))
246 f.write(encoding.fromlocal(self._active))
244 else:
247 else:
245 self._repo.vfs.tryunlink(b'bookmarks.current')
248 self._repo.vfs.tryunlink(b'bookmarks.current')
246 self._aclean = True
249 self._aclean = True
247
250
    def _write(self, fp):
        """Write all bookmarks to file object *fp*.

        One '<hex-node> <name>' line per bookmark, sorted by name.
        """
        for name, node in sorted(pycompat.iteritems(self._refmap)):
            fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name)))
        self._clean = True
        # bookmark moves can change volatile sets (e.g. computed revsets)
        self._repo.invalidatevolatilesets()
253
256
254 def expandname(self, bname):
257 def expandname(self, bname):
255 if bname == b'.':
258 if bname == b'.':
256 if self.active:
259 if self.active:
257 return self.active
260 return self.active
258 else:
261 else:
259 raise error.RepoLookupError(_(b"no active bookmark"))
262 raise error.RepoLookupError(_(b"no active bookmark"))
260 return bname
263 return bname
261
264
    def checkconflict(self, mark, force=False, target=None):
        """check repo for a potential clash of mark with an existing bookmark,
        branch, or hash

        If target is supplied, then check that we are moving the bookmark
        forward.

        If force is supplied, then forcibly move the bookmark to a new commit
        regardless if it is a move forward.

        If divergent bookmark are to be deleted, they will be returned as list.
        """
        cur = self._repo[b'.'].node()
        if mark in self._refmap and not force:
            if target:
                if self._refmap[mark] == target and target == cur:
                    # re-activating a bookmark
                    return []
                rev = self._repo[target].rev()
                anc = self._repo.changelog.ancestors([rev])
                bmctx = self._repo[self[mark]]
                # bookmarks sharing mark's base name (the part before '@')
                # are its potential divergent variants
                divs = [
                    self._refmap[b]
                    for b in self._refmap
                    if b.split(b'@', 1)[0] == mark.split(b'@', 1)[0]
                ]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    return divergent2delete(self._repo, [target], mark)

                deletefrom = [
                    b for b in divs if self._repo[b].rev() in anc or b == target
                ]
                delbms = divergent2delete(self._repo, deletefrom, mark)
                if validdest(self._repo, bmctx, self._repo[target]):
                    self._repo.ui.status(
                        _(b"moving bookmark '%s' forward from %s\n")
                        % (mark, short(bmctx.node()))
                    )
                    return delbms
            raise error.Abort(
                _(b"bookmark '%s' already exists (use -f to force)") % mark
            )
        if (
            mark in self._repo.branchmap()
            or mark == self._repo.dirstate.branch()
        ) and not force:
            raise error.Abort(
                _(b"a bookmark cannot have the name of an existing branch")
            )
        # warn when a new bookmark name shadows a changeset hash prefix
        # (only checked for names long enough to plausibly be a hash)
        if len(mark) > 3 and not force:
            try:
                shadowhash = scmutil.isrevsymbol(self._repo, mark)
            except error.LookupError:  # ambiguous identifier
                shadowhash = False
            if shadowhash:
                self._repo.ui.warn(
                    _(
                        b"bookmark %s matches a changeset hash\n"
                        b"(did you leave a -r out of an 'hg bookmark' "
                        b"command?)\n"
                    )
                    % mark
                )
        return []
330
333
331
334
def _readactive(repo, marks):
    """Return the name of the active bookmark, or None.

    The active bookmark is the one that updates itself as we commit; its
    name is stored in .hg/bookmarks.current.  Returns None when the file
    is empty, missing, or names a bookmark absent from *marks*.
    """
    # No readline() in osutil.posixfile; the file is tiny, read it whole.
    raw = repo.vfs.tryread(b'bookmarks.current')
    lines = raw.splitlines()
    name = encoding.tolocal(lines[0] if lines else b'')
    if not name or name not in marks:
        return None
    return name
345
348
346
349
def activate(repo, mark):
    """
    Set the given bookmark to be 'active', meaning that this bookmark will
    follow new commits that are made.
    The name is recorded in .hg/bookmarks.current
    """
    repo._bookmarks.active = mark
    # persist immediately so the activation survives this process
    repo._bookmarks._writeactive()
355
358
356
359
def deactivate(repo):
    """
    Unset the active bookmark in this repository.
    """
    repo._bookmarks.active = None
    # persist immediately (removes .hg/bookmarks.current)
    repo._bookmarks._writeactive()
363
366
364
367
def isactivewdirparent(repo):
    """
    Tell whether the 'active' bookmark (the one that follows new commits)
    points to one of the parents of the current working directory (wdir).

    While this is normally the case, it can on occasion be false; for example,
    immediately after a pull, the active bookmark can be moved to point
    to a place different than the wdir. This is solved by running `hg update`.
    """
    marks = repo._bookmarks
    active = repo._activebookmark
    if active not in marks:
        return False
    parent_nodes = [ctx.node() for ctx in repo[None].parents()]
    return marks[active] in parent_nodes
378
381
379
382
def divergent2delete(repo, deletefrom, bm):
    """Find divergent versions of *bm* whose nodes are in *deletefrom*.

    Returns the list of bookmark names to delete.
    """
    marks = repo._bookmarks
    base = bm.split(b'@', 1)[0]
    todelete = []
    for mark in marks:
        # only variants sharing bm's base name (before '@') qualify
        if mark.split(b'@', 1)[0] != base:
            continue
        if mark == b'@' or b'@' not in mark:
            # can't be divergent by definition
            continue
        if mark and mark != bm and marks[mark] in deletefrom:
            todelete.append(mark)
    return todelete
397
400
398
401
def headsforactive(repo):
    """Given a repo with an active bookmark, return divergent bookmark nodes.

    Args:
      repo: A repository with an active bookmark.

    Returns:
      A list of binary node ids that is the full list of other
      revisions with bookmarks divergent from the active bookmark. If
      there were no divergent bookmarks, then this list will contain
      only one entry.

    Raises:
      ValueError: if the repository has no active bookmark.
    """
    if not repo._activebookmark:
        raise ValueError(
            b'headsforactive() only makes sense with an active bookmark'
        )
    # every bookmark sharing the active bookmark's base name (the part
    # before '@') points at one of the heads we want
    name = repo._activebookmark.split(b'@', 1)[0]
    return [
        n
        for mark, n in pycompat.iteritems(repo._bookmarks)
        if mark.split(b'@', 1)[0] == name
    ]
421
424
422
425
def calculateupdate(ui, repo):
    """Return a tuple (activemark, movemarkfrom) indicating the active bookmark
    and where to move the active bookmark from, if needed."""
    checkout, movemarkfrom = None, None
    activemark = repo._activebookmark
    if isactivewdirparent(repo):
        # active bookmark already tracks the wdir parent: just move it along
        movemarkfrom = repo[b'.'].node()
    elif activemark:
        # active bookmark points elsewhere: update to it
        ui.status(_(b"updating to active bookmark %s\n") % activemark)
        checkout = activemark
    return (checkout, movemarkfrom)
434
437
435
438
def update(repo, parents, node):
    """Move the active bookmark (and clean up divergent variants) after an
    update from *parents* to *node*.

    Returns True when any bookmark change was applied.
    """
    deletefrom = parents
    marks = repo._bookmarks
    active = marks.active
    if not active:
        return False

    bmchanges = []
    if marks[active] in parents:
        new = repo[node]
        # bookmarks sharing the active bookmark's base name (before '@')
        divs = [
            repo[marks[b]]
            for b in marks
            if b.split(b'@', 1)[0] == active.split(b'@', 1)[0]
        ]
        anc = repo.changelog.ancestors([new.rev()])
        deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
        if validdest(repo, repo[marks[active]], new):
            bmchanges.append((active, new.node()))

    for bm in divergent2delete(repo, deletefrom, active):
        # a None node in the change list means deletion
        bmchanges.append((bm, None))

    if bmchanges:
        with repo.lock(), repo.transaction(b'bookmark') as tr:
            marks.applychanges(repo, tr, bmchanges)
    return bool(bmchanges)
463
466
464
467
def isdivergent(b):
    """Report whether bookmark name *b* has the divergent form.

    A divergent bookmark contains an '@' with a non-empty suffix
    (e.g. b'foo@2'); a name ending in '@' does not count.
    """
    has_marker = b.find(b'@') != -1
    return has_marker and not b.endswith(b'@')
467
470
468
471
def listbinbookmarks(repo):
    """Yield (name, binary node) pairs for the repo's exposable bookmarks.

    Bookmarks pointing at nodes unknown to the changelog and local
    divergent bookmarks are filtered out.
    """
    # Some repo types do not support bookmarks (e.g. statichttprepository);
    # fall back to an empty mapping then.
    bookmarks = getattr(repo, '_bookmarks', {})

    known = repo.changelog.hasnode
    for name, node in pycompat.iteritems(bookmarks):
        if not known(node):
            continue
        # don't expose local divergent bookmarks
        if isdivergent(name):
            continue
        yield name, node
479
482
480
483
def listbookmarks(repo):
    """Return a {name: hex node} dict of the repo's exposable bookmarks.

    Thin wrapper over listbinbookmarks() that hex-encodes the nodes.
    """
    # dict comprehension instead of a manual build loop (same result)
    return {book: hex(node) for book, node in listbinbookmarks(repo)}
486
489
487
490
def pushbookmark(repo, key, old, new):
    """Update bookmark *key* from hex node *old* to hex node *new*.

    pushkey-protocol entry point.  Returns True on success; returns False
    when the bookmark name is divergent, the current value matches neither
    *old* nor *new*, or *new* is unknown locally.  An empty *new* deletes
    the bookmark.
    """
    if isdivergent(key):
        return False
    # when bookmarks live in the store, the store lock below suffices
    if bookmarksinstore(repo):
        wlock = util.nullcontextmanager()
    else:
        wlock = repo.wlock()
    with wlock, repo.lock(), repo.transaction(b'bookmarks') as tr:
        marks = repo._bookmarks
        existing = hex(marks.get(key, b''))
        if existing != old and existing != new:
            return False
        if new == b'':
            # a None node means deletion
            changes = [(key, None)]
        else:
            if new not in repo:
                return False
            changes = [(key, repo[new].node())]
        marks.applychanges(repo, tr, changes)
        return True
508
511
509
512
def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
    """Compare bookmarks between srcmarks and dstmarks

    This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
    differ, invalid)", each are list of bookmarks below:

    :addsrc: added on src side (removed on dst side, perhaps)
    :adddst: added on dst side (removed on src side, perhaps)
    :advsrc: advanced on src side
    :advdst: advanced on dst side
    :diverge: diverge
    :differ: changed, but changeset referred on src is unknown on dst
    :invalid: unknown on both side
    :same: same on both side

    Each elements of lists in result tuple is tuple "(bookmark name,
    changeset ID on source side, changeset ID on destination
    side)". Each changeset ID is a binary node or None.

    Changeset IDs of tuples in "addsrc", "adddst", "differ" or
    "invalid" list may be unknown for repo.

    If "targets" is specified, only bookmarks listed in it are
    examined.
    """

    if targets:
        bset = set(targets)
    else:
        # examine the union of bookmark names from both sides
        srcmarkset = set(srcmarks)
        dstmarkset = set(dstmarks)
        bset = srcmarkset | dstmarkset

    # one bucket per category; bind the append methods for brevity below
    results = ([], [], [], [], [], [], [], [])
    addsrc = results[0].append
    adddst = results[1].append
    advsrc = results[2].append
    advdst = results[3].append
    diverge = results[4].append
    differ = results[5].append
    invalid = results[6].append
    same = results[7].append

    for b in sorted(bset):
        if b not in srcmarks:
            if b in dstmarks:
                adddst((b, None, dstmarks[b]))
            else:
                invalid((b, None, None))
        elif b not in dstmarks:
            addsrc((b, srcmarks[b], None))
        else:
            scid = srcmarks[b]
            dcid = dstmarks[b]
            if scid == dcid:
                same((b, scid, dcid))
            elif scid in repo and dcid in repo:
                # both nodes known locally: use ancestry to decide whether
                # one side is a fast-forward of the other or they diverged
                sctx = repo[scid]
                dctx = repo[dcid]
                if sctx.rev() < dctx.rev():
                    if validdest(repo, sctx, dctx):
                        advdst((b, scid, dcid))
                    else:
                        diverge((b, scid, dcid))
                else:
                    if validdest(repo, dctx, sctx):
                        advsrc((b, scid, dcid))
                    else:
                        diverge((b, scid, dcid))
            else:
                # it is too expensive to examine in detail, in this case
                differ((b, scid, dcid))

    return results
584
587
585
588
def _diverge(ui, b, path, localmarks, remotenode):
    """Return appropriate diverged bookmark for specified ``path``

    This returns None, if it is failed to assign any divergent
    bookmark name.

    This reuses already existing one with "@number" suffix, if it
    refers ``remotenode``.
    """
    if b == b'@':
        b = b''
    # try to use an @pathalias suffix
    # if an @pathalias already exists, we overwrite (update) it
    if path.startswith(b"file:"):
        path = urlutil.url(path).path
    for p, u in ui.configitems(b"paths"):
        if u.startswith(b"file:"):
            u = urlutil.url(u).path
        if path == u:
            return b'%s@%s' % (b, p)

    # assign a unique "@number" suffix newly
    for x in range(1, 100):
        n = b'%s@%d' % (b, x)
        if n not in localmarks or localmarks[n] == remotenode:
            return n

    # all 99 numbered names are taken by other nodes
    return None
614
617
615
618
def unhexlifybookmarks(marks):
    """Convert a {name: hex node} mapping into a {name: binary node} dict."""
    # dict comprehension instead of a manual build loop (same result)
    return {name: bin(node) for name, node in marks.items()}
621
624
622
625
# wire layout of one bookmark entry header: a 20-byte binary node followed
# by a big-endian unsigned short holding the bookmark-name length
_binaryentry = struct.Struct(b'>20sH')
624
627
625
628
def binaryencode(repo, bookmarks):
    """encode a '(bookmark, node)' iterable into a binary stream

    the binary format is:

        <node><bookmark-length><bookmark-name>

    :node: is a 20 bytes binary node,
    :bookmark-length: an unsigned short,
    :bookmark-name: the name of the bookmark (of length <bookmark-length>)

    wdirid (all bits set) will be used as a special value for "missing"
    """
    chunks = []
    append = chunks.append
    for name, node in bookmarks:
        # a missing node (None or b'') is encoded as wdirid
        append(_binaryentry.pack(node or wdirid, len(name)))
        append(name)
    return b''.join(chunks)
646
649
647
650
def binarydecode(repo, stream):
    """decode a binary stream into an '(bookmark, node)' iterable

    the binary format is:

        <node><bookmark-length><bookmark-name>

    :node: is a 20 bytes binary node,
    :bookmark-length: an unsigned short,
    :bookmark-name: the name of the bookmark (of length <bookmark-length>))

    wdirid (all bits set) will be used as a special value for "missing"
    """
    entrysize = _binaryentry.size
    books = []
    while True:
        entry = stream.read(entrysize)
        if len(entry) < entrysize:
            # a partial trailing header means the stream is corrupt;
            # an empty read is the normal end of stream
            if entry:
                raise error.Abort(_(b'bad bookmark stream'))
            break
        node, length = _binaryentry.unpack(entry)
        bookmark = stream.read(length)
        if len(bookmark) < length:
            # truncated bookmark name: stream is corrupt
            if entry:
                raise error.Abort(_(b'bad bookmark stream'))
        if node == wdirid:
            # wdirid is the sentinel for a "missing" node
            node = None
        books.append((bookmark, node))
    return books
678
681
679
682
def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
    """Apply bookmark moves learned from a remote to the local repo.

    ``remotemarks`` maps bookmark names to binary nodes, ``path`` names the
    remote (used when naming divergent bookmarks), ``trfunc`` is a callable
    returning a transaction, and ``explicit`` lists bookmark names the user
    requested explicitly (imported even when diverged).
    """
    ui.debug(b"checking for updated bookmarks\n")
    localmarks = repo._bookmarks
    (
        addsrc,
        adddst,
        advsrc,
        advdst,
        diverge,
        differ,
        invalid,
        same,
    ) = comparebookmarks(repo, remotemarks, localmarks)

    status = ui.status
    warn = ui.warn
    if ui.configbool(b'ui', b'quietbookmarkmove'):
        # demote bookmark-move chatter to debug output
        status = warn = ui.debug

    explicit = set(explicit)
    # each entry is (name, node, output function, message)
    changed = []
    for b, scid, dcid in addsrc:
        if scid in repo:  # add remote bookmarks for changes we already have
            changed.append(
                (b, scid, status, _(b"adding remote bookmark %s\n") % b)
            )
        elif b in explicit:
            explicit.remove(b)
            ui.warn(
                _(b"remote bookmark %s points to locally missing %s\n")
                % (b, hex(scid)[:12])
            )

    for b, scid, dcid in advsrc:
        changed.append((b, scid, status, _(b"updating bookmark %s\n") % b))
    # remove normal movement from explicit set
    explicit.difference_update(d[0] for d in changed)

    for b, scid, dcid in diverge:
        if b in explicit:
            explicit.discard(b)
            changed.append((b, scid, status, _(b"importing bookmark %s\n") % b))
        else:
            db = _diverge(ui, b, path, localmarks, scid)
            if db:
                changed.append(
                    (
                        db,
                        scid,
                        warn,
                        _(b"divergent bookmark %s stored as %s\n") % (b, db),
                    )
                )
            else:
                warn(
                    _(
                        b"warning: failed to assign numbered name "
                        b"to divergent bookmark %s\n"
                    )
                    % b
                )
    for b, scid, dcid in adddst + advdst:
        if b in explicit:
            explicit.discard(b)
            changed.append((b, scid, status, _(b"importing bookmark %s\n") % b))
    for b, scid, dcid in differ:
        if b in explicit:
            explicit.remove(b)
            ui.warn(
                _(b"remote bookmark %s points to locally missing %s\n")
                % (b, hex(scid)[:12])
            )

    if changed:
        tr = trfunc()
        changes = []
        # stable ordering; a None node sorts via b''
        key = lambda t: (t[0], t[1] or b'')
        for b, node, writer, msg in sorted(changed, key=key):
            changes.append((b, node))
            writer(msg)
        localmarks.applychanges(repo, tr, changes)
761
764
762
765
def incoming(ui, repo, peer):
    """Show bookmarks incoming from other to repo"""
    ui.status(_(b"searching for changed bookmarks\n"))

    # fetch the remote bookmark map via the listkeys wire command
    with peer.commandexecutor() as e:
        remotemarks = unhexlifybookmarks(
            e.callcommand(
                b'listkeys',
                {
                    b'namespace': b'bookmarks',
                },
            ).result()
        )

    r = comparebookmarks(repo, remotemarks, repo._bookmarks)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r

    incomings = []
    if ui.debugflag:
        # full hashes in debug mode, 12-char prefixes otherwise
        getid = lambda id: id
    else:
        getid = lambda id: id[:12]
    if ui.verbose:

        def add(b, id, st):
            incomings.append(b"   %-25s %s %s\n" % (b, getid(id), st))

    else:

        def add(b, id, st):
            incomings.append(b"   %-25s %s\n" % (b, getid(id)))

    for b, scid, dcid in addsrc:
        # i18n: "added" refers to a bookmark
        add(b, hex(scid), _(b'added'))
    for b, scid, dcid in advsrc:
        # i18n: "advanced" refers to a bookmark
        add(b, hex(scid), _(b'advanced'))
    for b, scid, dcid in diverge:
        # i18n: "diverged" refers to a bookmark
        add(b, hex(scid), _(b'diverged'))
    for b, scid, dcid in differ:
        # i18n: "changed" refers to a bookmark
        add(b, hex(scid), _(b'changed'))

    if not incomings:
        ui.status(_(b"no changed bookmarks found\n"))
        return 1

    for s in sorted(incomings):
        ui.write(s)

    return 0
816
819
817
820
818 def outgoing(ui, repo, other):
821 def outgoing(ui, repo, other):
819 """Show bookmarks outgoing from repo to other"""
822 """Show bookmarks outgoing from repo to other"""
820 ui.status(_(b"searching for changed bookmarks\n"))
823 ui.status(_(b"searching for changed bookmarks\n"))
821
824
822 remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
825 remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
823 r = comparebookmarks(repo, repo._bookmarks, remotemarks)
826 r = comparebookmarks(repo, repo._bookmarks, remotemarks)
824 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
827 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
825
828
826 outgoings = []
829 outgoings = []
827 if ui.debugflag:
830 if ui.debugflag:
828 getid = lambda id: id
831 getid = lambda id: id
829 else:
832 else:
830 getid = lambda id: id[:12]
833 getid = lambda id: id[:12]
831 if ui.verbose:
834 if ui.verbose:
832
835
833 def add(b, id, st):
836 def add(b, id, st):
834 outgoings.append(b" %-25s %s %s\n" % (b, getid(id), st))
837 outgoings.append(b" %-25s %s %s\n" % (b, getid(id), st))
835
838
836 else:
839 else:
837
840
838 def add(b, id, st):
841 def add(b, id, st):
839 outgoings.append(b" %-25s %s\n" % (b, getid(id)))
842 outgoings.append(b" %-25s %s\n" % (b, getid(id)))
840
843
841 for b, scid, dcid in addsrc:
844 for b, scid, dcid in addsrc:
842 # i18n: "added refers to a bookmark
845 # i18n: "added refers to a bookmark
843 add(b, hex(scid), _(b'added'))
846 add(b, hex(scid), _(b'added'))
844 for b, scid, dcid in adddst:
847 for b, scid, dcid in adddst:
845 # i18n: "deleted" refers to a bookmark
848 # i18n: "deleted" refers to a bookmark
846 add(b, b' ' * 40, _(b'deleted'))
849 add(b, b' ' * 40, _(b'deleted'))
847 for b, scid, dcid in advsrc:
850 for b, scid, dcid in advsrc:
848 # i18n: "advanced" refers to a bookmark
851 # i18n: "advanced" refers to a bookmark
849 add(b, hex(scid), _(b'advanced'))
852 add(b, hex(scid), _(b'advanced'))
850 for b, scid, dcid in diverge:
853 for b, scid, dcid in diverge:
851 # i18n: "diverged" refers to a bookmark
854 # i18n: "diverged" refers to a bookmark
852 add(b, hex(scid), _(b'diverged'))
855 add(b, hex(scid), _(b'diverged'))
853 for b, scid, dcid in differ:
856 for b, scid, dcid in differ:
854 # i18n: "changed" refers to a bookmark
857 # i18n: "changed" refers to a bookmark
855 add(b, hex(scid), _(b'changed'))
858 add(b, hex(scid), _(b'changed'))
856
859
857 if not outgoings:
860 if not outgoings:
858 ui.status(_(b"no changed bookmarks found\n"))
861 ui.status(_(b"no changed bookmarks found\n"))
859 return 1
862 return 1
860
863
861 for s in sorted(outgoings):
864 for s in sorted(outgoings):
862 ui.write(s)
865 ui.write(s)
863
866
864 return 0
867 return 0
865
868
866
869
867 def summary(repo, peer):
870 def summary(repo, peer):
868 """Compare bookmarks between repo and other for "hg summary" output
871 """Compare bookmarks between repo and other for "hg summary" output
869
872
870 This returns "(# of incoming, # of outgoing)" tuple.
873 This returns "(# of incoming, # of outgoing)" tuple.
871 """
874 """
872 with peer.commandexecutor() as e:
875 with peer.commandexecutor() as e:
873 remotemarks = unhexlifybookmarks(
876 remotemarks = unhexlifybookmarks(
874 e.callcommand(
877 e.callcommand(
875 b'listkeys',
878 b'listkeys',
876 {
879 {
877 b'namespace': b'bookmarks',
880 b'namespace': b'bookmarks',
878 },
881 },
879 ).result()
882 ).result()
880 )
883 )
881
884
882 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
885 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
883 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
886 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
884 return (len(addsrc), len(adddst))
887 return (len(addsrc), len(adddst))
885
888
886
889
887 def validdest(repo, old, new):
890 def validdest(repo, old, new):
888 """Is the new bookmark destination a valid update from the old one"""
891 """Is the new bookmark destination a valid update from the old one"""
889 repo = repo.unfiltered()
892 repo = repo.unfiltered()
890 if old == new:
893 if old == new:
891 # Old == new -> nothing to update.
894 # Old == new -> nothing to update.
892 return False
895 return False
893 elif not old:
896 elif not old:
894 # old is nullrev, anything is valid.
897 # old is nullrev, anything is valid.
895 # (new != nullrev has been excluded by the previous check)
898 # (new != nullrev has been excluded by the previous check)
896 return True
899 return True
897 elif repo.obsstore:
900 elif repo.obsstore:
898 return new.node() in obsutil.foreground(repo, [old.node()])
901 return new.node() in obsutil.foreground(repo, [old.node()])
899 else:
902 else:
900 # still an independent clause as it is lazier (and therefore faster)
903 # still an independent clause as it is lazier (and therefore faster)
901 return old.isancestorof(new)
904 return old.isancestorof(new)
902
905
903
906
904 def checkformat(repo, mark):
907 def checkformat(repo, mark):
905 """return a valid version of a potential bookmark name
908 """return a valid version of a potential bookmark name
906
909
907 Raises an abort error if the bookmark name is not valid.
910 Raises an abort error if the bookmark name is not valid.
908 """
911 """
909 mark = mark.strip()
912 mark = mark.strip()
910 if not mark:
913 if not mark:
911 raise error.InputError(
914 raise error.InputError(
912 _(b"bookmark names cannot consist entirely of whitespace")
915 _(b"bookmark names cannot consist entirely of whitespace")
913 )
916 )
914 scmutil.checknewlabel(repo, mark, b'bookmark')
917 scmutil.checknewlabel(repo, mark, b'bookmark')
915 return mark
918 return mark
916
919
917
920
918 def delete(repo, tr, names):
921 def delete(repo, tr, names):
919 """remove a mark from the bookmark store
922 """remove a mark from the bookmark store
920
923
921 Raises an abort error if mark does not exist.
924 Raises an abort error if mark does not exist.
922 """
925 """
923 marks = repo._bookmarks
926 marks = repo._bookmarks
924 changes = []
927 changes = []
925 for mark in names:
928 for mark in names:
926 if mark not in marks:
929 if mark not in marks:
927 raise error.InputError(_(b"bookmark '%s' does not exist") % mark)
930 raise error.InputError(_(b"bookmark '%s' does not exist") % mark)
928 if mark == repo._activebookmark:
931 if mark == repo._activebookmark:
929 deactivate(repo)
932 deactivate(repo)
930 changes.append((mark, None))
933 changes.append((mark, None))
931 marks.applychanges(repo, tr, changes)
934 marks.applychanges(repo, tr, changes)
932
935
933
936
934 def rename(repo, tr, old, new, force=False, inactive=False):
937 def rename(repo, tr, old, new, force=False, inactive=False):
935 """rename a bookmark from old to new
938 """rename a bookmark from old to new
936
939
937 If force is specified, then the new name can overwrite an existing
940 If force is specified, then the new name can overwrite an existing
938 bookmark.
941 bookmark.
939
942
940 If inactive is specified, then do not activate the new bookmark.
943 If inactive is specified, then do not activate the new bookmark.
941
944
942 Raises an abort error if old is not in the bookmark store.
945 Raises an abort error if old is not in the bookmark store.
943 """
946 """
944 marks = repo._bookmarks
947 marks = repo._bookmarks
945 mark = checkformat(repo, new)
948 mark = checkformat(repo, new)
946 if old not in marks:
949 if old not in marks:
947 raise error.InputError(_(b"bookmark '%s' does not exist") % old)
950 raise error.InputError(_(b"bookmark '%s' does not exist") % old)
948 changes = []
951 changes = []
949 for bm in marks.checkconflict(mark, force):
952 for bm in marks.checkconflict(mark, force):
950 changes.append((bm, None))
953 changes.append((bm, None))
951 changes.extend([(mark, marks[old]), (old, None)])
954 changes.extend([(mark, marks[old]), (old, None)])
952 marks.applychanges(repo, tr, changes)
955 marks.applychanges(repo, tr, changes)
953 if repo._activebookmark == old and not inactive:
956 if repo._activebookmark == old and not inactive:
954 activate(repo, mark)
957 activate(repo, mark)
955
958
956
959
957 def addbookmarks(repo, tr, names, rev=None, force=False, inactive=False):
960 def addbookmarks(repo, tr, names, rev=None, force=False, inactive=False):
958 """add a list of bookmarks
961 """add a list of bookmarks
959
962
960 If force is specified, then the new name can overwrite an existing
963 If force is specified, then the new name can overwrite an existing
961 bookmark.
964 bookmark.
962
965
963 If inactive is specified, then do not activate any bookmark. Otherwise, the
966 If inactive is specified, then do not activate any bookmark. Otherwise, the
964 first bookmark is activated.
967 first bookmark is activated.
965
968
966 Raises an abort error if old is not in the bookmark store.
969 Raises an abort error if old is not in the bookmark store.
967 """
970 """
968 marks = repo._bookmarks
971 marks = repo._bookmarks
969 cur = repo[b'.'].node()
972 cur = repo[b'.'].node()
970 newact = None
973 newact = None
971 changes = []
974 changes = []
972
975
973 # unhide revs if any
976 # unhide revs if any
974 if rev:
977 if rev:
975 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
978 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
976
979
977 ctx = scmutil.revsingle(repo, rev, None)
980 ctx = scmutil.revsingle(repo, rev, None)
978 # bookmarking wdir means creating a bookmark on p1 and activating it
981 # bookmarking wdir means creating a bookmark on p1 and activating it
979 activatenew = not inactive and ctx.rev() is None
982 activatenew = not inactive and ctx.rev() is None
980 if ctx.node() is None:
983 if ctx.node() is None:
981 ctx = ctx.p1()
984 ctx = ctx.p1()
982 tgt = ctx.node()
985 tgt = ctx.node()
983 assert tgt
986 assert tgt
984
987
985 for mark in names:
988 for mark in names:
986 mark = checkformat(repo, mark)
989 mark = checkformat(repo, mark)
987 if newact is None:
990 if newact is None:
988 newact = mark
991 newact = mark
989 if inactive and mark == repo._activebookmark:
992 if inactive and mark == repo._activebookmark:
990 deactivate(repo)
993 deactivate(repo)
991 continue
994 continue
992 for bm in marks.checkconflict(mark, force, tgt):
995 for bm in marks.checkconflict(mark, force, tgt):
993 changes.append((bm, None))
996 changes.append((bm, None))
994 changes.append((mark, tgt))
997 changes.append((mark, tgt))
995
998
996 # nothing changed but for the one deactivated above
999 # nothing changed but for the one deactivated above
997 if not changes:
1000 if not changes:
998 return
1001 return
999
1002
1000 if ctx.hidden():
1003 if ctx.hidden():
1001 repo.ui.warn(_(b"bookmarking hidden changeset %s\n") % ctx.hex()[:12])
1004 repo.ui.warn(_(b"bookmarking hidden changeset %s\n") % ctx.hex()[:12])
1002
1005
1003 if ctx.obsolete():
1006 if ctx.obsolete():
1004 msg = obsutil._getfilteredreason(repo, ctx.hex()[:12], ctx)
1007 msg = obsutil._getfilteredreason(repo, ctx.hex()[:12], ctx)
1005 repo.ui.warn(b"(%s)\n" % msg)
1008 repo.ui.warn(b"(%s)\n" % msg)
1006
1009
1007 marks.applychanges(repo, tr, changes)
1010 marks.applychanges(repo, tr, changes)
1008 if activatenew and cur == marks[newact]:
1011 if activatenew and cur == marks[newact]:
1009 activate(repo, newact)
1012 activate(repo, newact)
1010 elif cur != tgt and newact == repo._activebookmark:
1013 elif cur != tgt and newact == repo._activebookmark:
1011 deactivate(repo)
1014 deactivate(repo)
1012
1015
1013
1016
1014 def _printbookmarks(ui, repo, fm, bmarks):
1017 def _printbookmarks(ui, repo, fm, bmarks):
1015 """private method to print bookmarks
1018 """private method to print bookmarks
1016
1019
1017 Provides a way for extensions to control how bookmarks are printed (e.g.
1020 Provides a way for extensions to control how bookmarks are printed (e.g.
1018 prepend or postpend names)
1021 prepend or postpend names)
1019 """
1022 """
1020 hexfn = fm.hexfunc
1023 hexfn = fm.hexfunc
1021 if len(bmarks) == 0 and fm.isplain():
1024 if len(bmarks) == 0 and fm.isplain():
1022 ui.status(_(b"no bookmarks set\n"))
1025 ui.status(_(b"no bookmarks set\n"))
1023 for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)):
1026 for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)):
1024 fm.startitem()
1027 fm.startitem()
1025 fm.context(repo=repo)
1028 fm.context(repo=repo)
1026 if not ui.quiet:
1029 if not ui.quiet:
1027 fm.plain(b' %s ' % prefix, label=label)
1030 fm.plain(b' %s ' % prefix, label=label)
1028 fm.write(b'bookmark', b'%s', bmark, label=label)
1031 fm.write(b'bookmark', b'%s', bmark, label=label)
1029 pad = b" " * (25 - encoding.colwidth(bmark))
1032 pad = b" " * (25 - encoding.colwidth(bmark))
1030 fm.condwrite(
1033 fm.condwrite(
1031 not ui.quiet,
1034 not ui.quiet,
1032 b'rev node',
1035 b'rev node',
1033 pad + b' %d:%s',
1036 pad + b' %d:%s',
1034 repo.changelog.rev(n),
1037 repo.changelog.rev(n),
1035 hexfn(n),
1038 hexfn(n),
1036 label=label,
1039 label=label,
1037 )
1040 )
1038 fm.data(active=(activebookmarklabel in label))
1041 fm.data(active=(activebookmarklabel in label))
1039 fm.plain(b'\n')
1042 fm.plain(b'\n')
1040
1043
1041
1044
1042 def printbookmarks(ui, repo, fm, names=None):
1045 def printbookmarks(ui, repo, fm, names=None):
1043 """print bookmarks by the given formatter
1046 """print bookmarks by the given formatter
1044
1047
1045 Provides a way for extensions to control how bookmarks are printed.
1048 Provides a way for extensions to control how bookmarks are printed.
1046 """
1049 """
1047 marks = repo._bookmarks
1050 marks = repo._bookmarks
1048 bmarks = {}
1051 bmarks = {}
1049 for bmark in names or marks:
1052 for bmark in names or marks:
1050 if bmark not in marks:
1053 if bmark not in marks:
1051 raise error.InputError(_(b"bookmark '%s' does not exist") % bmark)
1054 raise error.InputError(_(b"bookmark '%s' does not exist") % bmark)
1052 active = repo._activebookmark
1055 active = repo._activebookmark
1053 if bmark == active:
1056 if bmark == active:
1054 prefix, label = b'*', activebookmarklabel
1057 prefix, label = b'*', activebookmarklabel
1055 else:
1058 else:
1056 prefix, label = b' ', b''
1059 prefix, label = b' ', b''
1057
1060
1058 bmarks[bmark] = (marks[bmark], prefix, label)
1061 bmarks[bmark] = (marks[bmark], prefix, label)
1059 _printbookmarks(ui, repo, fm, bmarks)
1062 _printbookmarks(ui, repo, fm, bmarks)
1060
1063
1061
1064
1062 def preparehookargs(name, old, new):
1065 def preparehookargs(name, old, new):
1063 if new is None:
1066 if new is None:
1064 new = b''
1067 new = b''
1065 if old is None:
1068 if old is None:
1066 old = b''
1069 old = b''
1067 return {b'bookmark': name, b'node': hex(new), b'oldnode': hex(old)}
1070 return {b'bookmark': name, b'node': hex(new), b'oldnode': hex(old)}
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now