Show More
@@ -1,2681 +1,2679 b'' | |||||
1 | # histedit.py - interactive history editing for mercurial |
|
1 | # histedit.py - interactive history editing for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2009 Augie Fackler <raf@durin42.com> |
|
3 | # Copyright 2009 Augie Fackler <raf@durin42.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 | """interactive history editing |
|
7 | """interactive history editing | |
8 |
|
8 | |||
9 | With this extension installed, Mercurial gains one new command: histedit. Usage |
|
9 | With this extension installed, Mercurial gains one new command: histedit. Usage | |
10 | is as follows, assuming the following history:: |
|
10 | is as follows, assuming the following history:: | |
11 |
|
11 | |||
12 | @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42 |
|
12 | @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42 | |
13 | | Add delta |
|
13 | | Add delta | |
14 | | |
|
14 | | | |
15 | o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42 |
|
15 | o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42 | |
16 | | Add gamma |
|
16 | | Add gamma | |
17 | | |
|
17 | | | |
18 | o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42 |
|
18 | o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42 | |
19 | | Add beta |
|
19 | | Add beta | |
20 | | |
|
20 | | | |
21 | o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42 |
|
21 | o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42 | |
22 | Add alpha |
|
22 | Add alpha | |
23 |
|
23 | |||
24 | If you were to run ``hg histedit c561b4e977df``, you would see the following |
|
24 | If you were to run ``hg histedit c561b4e977df``, you would see the following | |
25 | file open in your editor:: |
|
25 | file open in your editor:: | |
26 |
|
26 | |||
27 | pick c561b4e977df Add beta |
|
27 | pick c561b4e977df Add beta | |
28 | pick 030b686bedc4 Add gamma |
|
28 | pick 030b686bedc4 Add gamma | |
29 | pick 7c2fd3b9020c Add delta |
|
29 | pick 7c2fd3b9020c Add delta | |
30 |
|
30 | |||
31 | # Edit history between c561b4e977df and 7c2fd3b9020c |
|
31 | # Edit history between c561b4e977df and 7c2fd3b9020c | |
32 | # |
|
32 | # | |
33 | # Commits are listed from least to most recent |
|
33 | # Commits are listed from least to most recent | |
34 | # |
|
34 | # | |
35 | # Commands: |
|
35 | # Commands: | |
36 | # p, pick = use commit |
|
36 | # p, pick = use commit | |
37 | # e, edit = use commit, but allow edits before making new commit |
|
37 | # e, edit = use commit, but allow edits before making new commit | |
38 | # f, fold = use commit, but combine it with the one above |
|
38 | # f, fold = use commit, but combine it with the one above | |
39 | # r, roll = like fold, but discard this commit's description and date |
|
39 | # r, roll = like fold, but discard this commit's description and date | |
40 | # d, drop = remove commit from history |
|
40 | # d, drop = remove commit from history | |
41 | # m, mess = edit commit message without changing commit content |
|
41 | # m, mess = edit commit message without changing commit content | |
42 | # b, base = checkout changeset and apply further changesets from there |
|
42 | # b, base = checkout changeset and apply further changesets from there | |
43 | # |
|
43 | # | |
44 |
|
44 | |||
45 | In this file, lines beginning with ``#`` are ignored. You must specify a rule |
|
45 | In this file, lines beginning with ``#`` are ignored. You must specify a rule | |
46 | for each revision in your history. For example, if you had meant to add gamma |
|
46 | for each revision in your history. For example, if you had meant to add gamma | |
47 | before beta, and then wanted to add delta in the same revision as beta, you |
|
47 | before beta, and then wanted to add delta in the same revision as beta, you | |
48 | would reorganize the file to look like this:: |
|
48 | would reorganize the file to look like this:: | |
49 |
|
49 | |||
50 | pick 030b686bedc4 Add gamma |
|
50 | pick 030b686bedc4 Add gamma | |
51 | pick c561b4e977df Add beta |
|
51 | pick c561b4e977df Add beta | |
52 | fold 7c2fd3b9020c Add delta |
|
52 | fold 7c2fd3b9020c Add delta | |
53 |
|
53 | |||
54 | # Edit history between c561b4e977df and 7c2fd3b9020c |
|
54 | # Edit history between c561b4e977df and 7c2fd3b9020c | |
55 | # |
|
55 | # | |
56 | # Commits are listed from least to most recent |
|
56 | # Commits are listed from least to most recent | |
57 | # |
|
57 | # | |
58 | # Commands: |
|
58 | # Commands: | |
59 | # p, pick = use commit |
|
59 | # p, pick = use commit | |
60 | # e, edit = use commit, but allow edits before making new commit |
|
60 | # e, edit = use commit, but allow edits before making new commit | |
61 | # f, fold = use commit, but combine it with the one above |
|
61 | # f, fold = use commit, but combine it with the one above | |
62 | # r, roll = like fold, but discard this commit's description and date |
|
62 | # r, roll = like fold, but discard this commit's description and date | |
63 | # d, drop = remove commit from history |
|
63 | # d, drop = remove commit from history | |
64 | # m, mess = edit commit message without changing commit content |
|
64 | # m, mess = edit commit message without changing commit content | |
65 | # b, base = checkout changeset and apply further changesets from there |
|
65 | # b, base = checkout changeset and apply further changesets from there | |
66 | # |
|
66 | # | |
67 |
|
67 | |||
68 | At which point you close the editor and ``histedit`` starts working. When you |
|
68 | At which point you close the editor and ``histedit`` starts working. When you | |
69 | specify a ``fold`` operation, ``histedit`` will open an editor when it folds |
|
69 | specify a ``fold`` operation, ``histedit`` will open an editor when it folds | |
70 | those revisions together, offering you a chance to clean up the commit message:: |
|
70 | those revisions together, offering you a chance to clean up the commit message:: | |
71 |
|
71 | |||
72 | Add beta |
|
72 | Add beta | |
73 | *** |
|
73 | *** | |
74 | Add delta |
|
74 | Add delta | |
75 |
|
75 | |||
76 | Edit the commit message to your liking, then close the editor. The date used |
|
76 | Edit the commit message to your liking, then close the editor. The date used | |
77 | for the commit will be the later of the two commits' dates. For this example, |
|
77 | for the commit will be the later of the two commits' dates. For this example, | |
78 | let's assume that the commit message was changed to ``Add beta and delta.`` |
|
78 | let's assume that the commit message was changed to ``Add beta and delta.`` | |
79 | After histedit has run and had a chance to remove any old or temporary |
|
79 | After histedit has run and had a chance to remove any old or temporary | |
80 | revisions it needed, the history looks like this:: |
|
80 | revisions it needed, the history looks like this:: | |
81 |
|
81 | |||
82 | @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42 |
|
82 | @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42 | |
83 | | Add beta and delta. |
|
83 | | Add beta and delta. | |
84 | | |
|
84 | | | |
85 | o 1 081603921c3f 2009-04-27 18:04 -0500 durin42 |
|
85 | o 1 081603921c3f 2009-04-27 18:04 -0500 durin42 | |
86 | | Add gamma |
|
86 | | Add gamma | |
87 | | |
|
87 | | | |
88 | o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42 |
|
88 | o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42 | |
89 | Add alpha |
|
89 | Add alpha | |
90 |
|
90 | |||
91 | Note that ``histedit`` does *not* remove any revisions (even its own temporary |
|
91 | Note that ``histedit`` does *not* remove any revisions (even its own temporary | |
92 | ones) until after it has completed all the editing operations, so it will |
|
92 | ones) until after it has completed all the editing operations, so it will | |
93 | probably perform several strip operations when it's done. For the above example, |
|
93 | probably perform several strip operations when it's done. For the above example, | |
94 | it had to run strip twice. Strip can be slow depending on a variety of factors, |
|
94 | it had to run strip twice. Strip can be slow depending on a variety of factors, | |
95 | so you might need to be a little patient. You can choose to keep the original |
|
95 | so you might need to be a little patient. You can choose to keep the original | |
96 | revisions by passing the ``--keep`` flag. |
|
96 | revisions by passing the ``--keep`` flag. | |
97 |
|
97 | |||
98 | The ``edit`` operation will drop you back to a command prompt, |
|
98 | The ``edit`` operation will drop you back to a command prompt, | |
99 | allowing you to edit files freely, or even use ``hg record`` to commit |
|
99 | allowing you to edit files freely, or even use ``hg record`` to commit | |
100 | some changes as a separate commit. When you're done, any remaining |
|
100 | some changes as a separate commit. When you're done, any remaining | |
101 | uncommitted changes will be committed as well. When done, run ``hg |
|
101 | uncommitted changes will be committed as well. When done, run ``hg | |
102 | histedit --continue`` to finish this step. If there are uncommitted |
|
102 | histedit --continue`` to finish this step. If there are uncommitted | |
103 | changes, you'll be prompted for a new commit message, but the default |
|
103 | changes, you'll be prompted for a new commit message, but the default | |
104 | commit message will be the original message for the ``edit`` ed |
|
104 | commit message will be the original message for the ``edit`` ed | |
105 | revision, and the date of the original commit will be preserved. |
|
105 | revision, and the date of the original commit will be preserved. | |
106 |
|
106 | |||
107 | The ``message`` operation will give you a chance to revise a commit |
|
107 | The ``message`` operation will give you a chance to revise a commit | |
108 | message without changing the contents. It's a shortcut for doing |
|
108 | message without changing the contents. It's a shortcut for doing | |
109 | ``edit`` immediately followed by ``hg histedit --continue``. |

109 | ``edit`` immediately followed by ``hg histedit --continue``. | |
110 |
|
110 | |||
111 | If ``histedit`` encounters a conflict when moving a revision (while |
|
111 | If ``histedit`` encounters a conflict when moving a revision (while | |
112 | handling ``pick`` or ``fold``), it'll stop in a similar manner to |
|
112 | handling ``pick`` or ``fold``), it'll stop in a similar manner to | |
113 | ``edit`` with the difference that it won't prompt you for a commit |
|
113 | ``edit`` with the difference that it won't prompt you for a commit | |
114 | message when done. If you decide at this point that you don't like how |
|
114 | message when done. If you decide at this point that you don't like how | |
115 | much work it will be to rearrange history, or that you made a mistake, |
|
115 | much work it will be to rearrange history, or that you made a mistake, | |
116 | you can use ``hg histedit --abort`` to abandon the new changes you |
|
116 | you can use ``hg histedit --abort`` to abandon the new changes you | |
117 | have made and return to the state before you attempted to edit your |
|
117 | have made and return to the state before you attempted to edit your | |
118 | history. |
|
118 | history. | |
119 |
|
119 | |||
120 | If we clone the histedit-ed example repository above and add four more |
|
120 | If we clone the histedit-ed example repository above and add four more | |
121 | changes, such that we have the following history:: |
|
121 | changes, such that we have the following history:: | |
122 |
|
122 | |||
123 | @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan |
|
123 | @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan | |
124 | | Add theta |
|
124 | | Add theta | |
125 | | |
|
125 | | | |
126 | o 5 140988835471 2009-04-27 18:04 -0500 stefan |
|
126 | o 5 140988835471 2009-04-27 18:04 -0500 stefan | |
127 | | Add eta |
|
127 | | Add eta | |
128 | | |
|
128 | | | |
129 | o 4 122930637314 2009-04-27 18:04 -0500 stefan |
|
129 | o 4 122930637314 2009-04-27 18:04 -0500 stefan | |
130 | | Add zeta |
|
130 | | Add zeta | |
131 | | |
|
131 | | | |
132 | o 3 836302820282 2009-04-27 18:04 -0500 stefan |
|
132 | o 3 836302820282 2009-04-27 18:04 -0500 stefan | |
133 | | Add epsilon |
|
133 | | Add epsilon | |
134 | | |
|
134 | | | |
135 | o 2 989b4d060121 2009-04-27 18:04 -0500 durin42 |
|
135 | o 2 989b4d060121 2009-04-27 18:04 -0500 durin42 | |
136 | | Add beta and delta. |
|
136 | | Add beta and delta. | |
137 | | |
|
137 | | | |
138 | o 1 081603921c3f 2009-04-27 18:04 -0500 durin42 |
|
138 | o 1 081603921c3f 2009-04-27 18:04 -0500 durin42 | |
139 | | Add gamma |
|
139 | | Add gamma | |
140 | | |
|
140 | | | |
141 | o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42 |
|
141 | o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42 | |
142 | Add alpha |
|
142 | Add alpha | |
143 |
|
143 | |||
144 | If you run ``hg histedit --outgoing`` on the clone then it is the same |
|
144 | If you run ``hg histedit --outgoing`` on the clone then it is the same | |
145 | as running ``hg histedit 836302820282``. If you plan to push to a |

145 | as running ``hg histedit 836302820282``. If you plan to push to a | |
146 | repository that Mercurial does not detect to be related to the source |
|
146 | repository that Mercurial does not detect to be related to the source | |
147 | repo, you can add a ``--force`` option. |
|
147 | repo, you can add a ``--force`` option. | |
148 |
|
148 | |||
149 | Config |
|
149 | Config | |
150 | ------ |
|
150 | ------ | |
151 |
|
151 | |||
152 | Histedit rule lines are truncated to 80 characters by default. You |
|
152 | Histedit rule lines are truncated to 80 characters by default. You | |
153 | can customize this behavior by setting a different length in your |
|
153 | can customize this behavior by setting a different length in your | |
154 | configuration file:: |
|
154 | configuration file:: | |
155 |
|
155 | |||
156 | [histedit] |
|
156 | [histedit] | |
157 | linelen = 120 # truncate rule lines at 120 characters |
|
157 | linelen = 120 # truncate rule lines at 120 characters | |
158 |
|
158 | |||
159 | The summary of a change can be customized as well:: |
|
159 | The summary of a change can be customized as well:: | |
160 |
|
160 | |||
161 | [histedit] |
|
161 | [histedit] | |
162 | summary-template = '{rev} {bookmarks} {desc|firstline}' |
|
162 | summary-template = '{rev} {bookmarks} {desc|firstline}' | |
163 |
|
163 | |||
164 | The customized summary should be kept short enough that rule lines |
|
164 | The customized summary should be kept short enough that rule lines | |
165 | will fit in the configured line length. See above if that requires |
|
165 | will fit in the configured line length. See above if that requires | |
166 | customization. |
|
166 | customization. | |
167 |
|
167 | |||
168 | ``hg histedit`` attempts to automatically choose an appropriate base |
|
168 | ``hg histedit`` attempts to automatically choose an appropriate base | |
169 | revision to use. To change which base revision is used, define a |
|
169 | revision to use. To change which base revision is used, define a | |
170 | revset in your configuration file:: |
|
170 | revset in your configuration file:: | |
171 |
|
171 | |||
172 | [histedit] |
|
172 | [histedit] | |
173 | defaultrev = only(.) & draft() |
|
173 | defaultrev = only(.) & draft() | |
174 |
|
174 | |||
175 | By default each edited revision needs to be present in histedit commands. |
|
175 | By default each edited revision needs to be present in histedit commands. | |
176 | To remove revision you need to use ``drop`` operation. You can configure |
|
176 | To remove revision you need to use ``drop`` operation. You can configure | |
177 | the drop to be implicit for missing commits by adding:: |
|
177 | the drop to be implicit for missing commits by adding:: | |
178 |
|
178 | |||
179 | [histedit] |
|
179 | [histedit] | |
180 | dropmissing = True |
|
180 | dropmissing = True | |
181 |
|
181 | |||
182 | By default, histedit will close the transaction after each action. For |
|
182 | By default, histedit will close the transaction after each action. For | |
183 | performance purposes, you can configure histedit to use a single transaction |
|
183 | performance purposes, you can configure histedit to use a single transaction | |
184 | across the entire histedit. WARNING: This setting introduces a significant risk |
|
184 | across the entire histedit. WARNING: This setting introduces a significant risk | |
185 | of losing the work you've done in a histedit if the histedit aborts |
|
185 | of losing the work you've done in a histedit if the histedit aborts | |
186 | unexpectedly:: |
|
186 | unexpectedly:: | |
187 |
|
187 | |||
188 | [histedit] |
|
188 | [histedit] | |
189 | singletransaction = True |
|
189 | singletransaction = True | |
190 |
|
190 | |||
191 | """ |
|
191 | """ | |
192 |
|
192 | |||
193 |
|
193 | |||
194 | # chistedit dependencies that are not available everywhere |
|
194 | # chistedit dependencies that are not available everywhere | |
195 | try: |
|
195 | try: | |
196 | import fcntl |
|
196 | import fcntl | |
197 | import termios |
|
197 | import termios | |
198 | except ImportError: |
|
198 | except ImportError: | |
199 | fcntl = None |
|
199 | fcntl = None | |
200 | termios = None |
|
200 | termios = None | |
201 |
|
201 | |||
202 | import binascii |
|
202 | import binascii | |
203 | import functools |
|
203 | import functools | |
204 | import os |
|
204 | import os | |
205 | import pickle |
|
205 | import pickle | |
206 | import struct |
|
206 | import struct | |
207 |
|
207 | |||
208 | from mercurial.i18n import _ |
|
208 | from mercurial.i18n import _ | |
209 | from mercurial.pycompat import ( |
|
209 | from mercurial.pycompat import ( | |
210 | open, |
|
210 | open, | |
211 | ) |
|
211 | ) | |
212 | from mercurial.node import ( |
|
212 | from mercurial.node import ( | |
213 | bin, |
|
213 | bin, | |
214 | hex, |
|
214 | hex, | |
215 | short, |
|
215 | short, | |
216 | ) |
|
216 | ) | |
217 | from mercurial import ( |
|
217 | from mercurial import ( | |
218 | bundle2, |
|
218 | bundle2, | |
219 | cmdutil, |
|
219 | cmdutil, | |
220 | context, |
|
220 | context, | |
221 | copies, |
|
221 | copies, | |
222 | destutil, |
|
222 | destutil, | |
223 | discovery, |
|
223 | discovery, | |
224 | encoding, |
|
224 | encoding, | |
225 | error, |
|
225 | error, | |
226 | exchange, |
|
226 | exchange, | |
227 | extensions, |
|
227 | extensions, | |
228 | hg, |
|
228 | hg, | |
229 | logcmdutil, |
|
229 | logcmdutil, | |
230 | merge as mergemod, |
|
230 | merge as mergemod, | |
231 | mergestate as mergestatemod, |
|
231 | mergestate as mergestatemod, | |
232 | mergeutil, |
|
232 | mergeutil, | |
233 | obsolete, |
|
233 | obsolete, | |
234 | pycompat, |
|
234 | pycompat, | |
235 | registrar, |
|
235 | registrar, | |
236 | repair, |
|
236 | repair, | |
237 | rewriteutil, |
|
237 | rewriteutil, | |
238 | scmutil, |
|
238 | scmutil, | |
239 | state as statemod, |
|
239 | state as statemod, | |
240 | util, |
|
240 | util, | |
241 | ) |
|
241 | ) | |
242 | from mercurial.utils import ( |
|
242 | from mercurial.utils import ( | |
243 | dateutil, |
|
243 | dateutil, | |
244 | stringutil, |
|
244 | stringutil, | |
245 | urlutil, |
|
245 | urlutil, | |
246 | ) |
|
246 | ) | |
247 |
|
247 | |||
248 | cmdtable = {} |
|
248 | cmdtable = {} | |
249 | command = registrar.command(cmdtable) |
|
249 | command = registrar.command(cmdtable) | |
250 |
|
250 | |||
251 | configtable = {} |
|
251 | configtable = {} | |
252 | configitem = registrar.configitem(configtable) |
|
252 | configitem = registrar.configitem(configtable) | |
253 | configitem( |
|
253 | configitem( | |
254 | b'experimental', |
|
254 | b'experimental', | |
255 | b'histedit.autoverb', |
|
255 | b'histedit.autoverb', | |
256 | default=False, |
|
256 | default=False, | |
257 | ) |
|
257 | ) | |
258 | configitem( |
|
258 | configitem( | |
259 | b'histedit', |
|
259 | b'histedit', | |
260 | b'defaultrev', |
|
260 | b'defaultrev', | |
261 | default=None, |
|
261 | default=None, | |
262 | ) |
|
262 | ) | |
263 | configitem( |
|
263 | configitem( | |
264 | b'histedit', |
|
264 | b'histedit', | |
265 | b'dropmissing', |
|
265 | b'dropmissing', | |
266 | default=False, |
|
266 | default=False, | |
267 | ) |
|
267 | ) | |
268 | configitem( |
|
268 | configitem( | |
269 | b'histedit', |
|
269 | b'histedit', | |
270 | b'linelen', |
|
270 | b'linelen', | |
271 | default=80, |
|
271 | default=80, | |
272 | ) |
|
272 | ) | |
273 | configitem( |
|
273 | configitem( | |
274 | b'histedit', |
|
274 | b'histedit', | |
275 | b'singletransaction', |
|
275 | b'singletransaction', | |
276 | default=False, |
|
276 | default=False, | |
277 | ) |
|
277 | ) | |
278 | configitem( |
|
278 | configitem( | |
279 | b'ui', |
|
279 | b'ui', | |
280 | b'interface.histedit', |
|
280 | b'interface.histedit', | |
281 | default=None, |
|
281 | default=None, | |
282 | ) |
|
282 | ) | |
283 | configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}') |
|
283 | configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}') | |
284 | # TODO: Teach the text-based histedit interface to respect this config option |
|
284 | # TODO: Teach the text-based histedit interface to respect this config option | |
285 | # before we make it non-experimental. |
|
285 | # before we make it non-experimental. | |
286 | configitem( |
|
286 | configitem( | |
287 | b'histedit', b'later-commits-first', default=False, experimental=True |
|
287 | b'histedit', b'later-commits-first', default=False, experimental=True | |
288 | ) |
|
288 | ) | |
289 |
|
289 | |||
290 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
290 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | |
291 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
291 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | |
292 | # be specifying the version(s) of Mercurial they are tested with, or |
|
292 | # be specifying the version(s) of Mercurial they are tested with, or | |
293 | # leave the attribute unspecified. |
|
293 | # leave the attribute unspecified. | |
294 | testedwith = b'ships-with-hg-core' |
|
294 | testedwith = b'ships-with-hg-core' | |
295 |
|
295 | |||
296 | actiontable = {} |
|
296 | actiontable = {} | |
297 | primaryactions = set() |
|
297 | primaryactions = set() | |
298 | secondaryactions = set() |
|
298 | secondaryactions = set() | |
299 | tertiaryactions = set() |
|
299 | tertiaryactions = set() | |
300 | internalactions = set() |
|
300 | internalactions = set() | |
301 |
|
301 | |||
302 |
|
302 | |||
def geteditcomment(ui, first, last):
    """Build the '#'-prefixed help text appended to the rule editor.

    The comment consists of::
     - an intro naming the *first* and *last* edited changesets
     - the known commands, grouped (primary, secondary, tertiary) and
       sorted within each group
     - optional extra hints driven by configuration

    Each command verb is listed only once.
    """
    intro = _(
        b"""Edit history between %s and %s

Commits are listed from least to most recent

You can reorder changesets by reordering the lines

Commands:
"""
    )
    entries = []

    def describe(verb):
        # One " <verbs> = <summary>" entry per action; any extra message
        # lines are represented by bare placeholder entries.
        action = actiontable[verb]
        msglines = action.message.split(b"\n")
        if len(action.verbs):
            verb = b', '.join(sorted(action.verbs, key=lambda v: len(v)))
        entries.append(b" %s = %s" % (verb, msglines[0]))
        entries.extend([b' %s'] * (len(msglines) - 1))

    ordered = (
        sorted(primaryactions)
        + sorted(secondaryactions)
        + sorted(tertiaryactions)
    )
    for verb in ordered:
        describe(verb)
    entries.append(b'')

    hints = []
    if ui.configbool(b'histedit', b'dropmissing'):
        hints.append(
            b"Deleting a changeset from the list "
            b"will DISCARD it from the edited history!"
        )

    alllines = (intro % (first, last)).split(b'\n') + entries + hints

    return b''.join(b'# %s\n' % line if line else b'#\n' for line in alllines)
352 |
|
352 | |||
353 |
|
353 | |||
354 | class histeditstate: |
|
354 | class histeditstate: | |
    def __init__(self, repo):
        """Create an empty histedit state bound to *repo*.

        Fields start unset; they are filled in by read() when resuming an
        in-progress histedit, or by the command setup code otherwise.
        """
        self.repo = repo
        # Ordered list of parsed rule actions (set via parserules in read()).
        self.actions = None
        # Whether original revisions are kept (serialized as True/False).
        self.keep = None
        # Binary node id stored as 'topmost' in the state file.
        self.topmost = None
        # Binary node id of the current parent context (persisted in state).
        self.parentctxnode = None
        # Repository locks held during the operation, if any.
        self.lock = None
        self.wlock = None
        # Backup bundle path recorded in the state file (may stay None).
        self.backupfile = None
        # Persistent on-disk state handle for the 'histedit-state' file.
        self.stateobj = statemod.cmdstate(repo, b'histedit-state')
        # List of (original-node, [successor-nodes]) pairs.
        self.replacements = []
366 |
|
366 | |||
367 | def read(self): |
|
367 | def read(self): | |
368 | """Load histedit state from disk and set fields appropriately.""" |
|
368 | """Load histedit state from disk and set fields appropriately.""" | |
369 | if not self.stateobj.exists(): |
|
369 | if not self.stateobj.exists(): | |
370 | cmdutil.wrongtooltocontinue(self.repo, _(b'histedit')) |
|
370 | cmdutil.wrongtooltocontinue(self.repo, _(b'histedit')) | |
371 |
|
371 | |||
372 | data = self._read() |
|
372 | data = self._read() | |
373 |
|
373 | |||
374 | self.parentctxnode = data[b'parentctxnode'] |
|
374 | self.parentctxnode = data[b'parentctxnode'] | |
375 | actions = parserules(data[b'rules'], self) |
|
375 | actions = parserules(data[b'rules'], self) | |
376 | self.actions = actions |
|
376 | self.actions = actions | |
377 | self.keep = data[b'keep'] |
|
377 | self.keep = data[b'keep'] | |
378 | self.topmost = data[b'topmost'] |
|
378 | self.topmost = data[b'topmost'] | |
379 | self.replacements = data[b'replacements'] |
|
379 | self.replacements = data[b'replacements'] | |
380 | self.backupfile = data[b'backupfile'] |
|
380 | self.backupfile = data[b'backupfile'] | |
381 |
|
381 | |||
382 | def _read(self): |
|
382 | def _read(self): | |
383 | fp = self.repo.vfs.read(b'histedit-state') |
|
383 | fp = self.repo.vfs.read(b'histedit-state') | |
384 | if fp.startswith(b'v1\n'): |
|
384 | if fp.startswith(b'v1\n'): | |
385 | data = self._load() |
|
385 | data = self._load() | |
386 | parentctxnode, rules, keep, topmost, replacements, backupfile = data |
|
386 | parentctxnode, rules, keep, topmost, replacements, backupfile = data | |
387 | else: |
|
387 | else: | |
388 | data = pickle.loads(fp) |
|
388 | data = pickle.loads(fp) | |
389 | parentctxnode, rules, keep, topmost, replacements = data |
|
389 | parentctxnode, rules, keep, topmost, replacements = data | |
390 | backupfile = None |
|
390 | backupfile = None | |
391 | rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules]) |
|
391 | rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules]) | |
392 |
|
392 | |||
393 | return { |
|
393 | return { | |
394 | b'parentctxnode': parentctxnode, |
|
394 | b'parentctxnode': parentctxnode, | |
395 | b"rules": rules, |
|
395 | b"rules": rules, | |
396 | b"keep": keep, |
|
396 | b"keep": keep, | |
397 | b"topmost": topmost, |
|
397 | b"topmost": topmost, | |
398 | b"replacements": replacements, |
|
398 | b"replacements": replacements, | |
399 | b"backupfile": backupfile, |
|
399 | b"backupfile": backupfile, | |
400 | } |
|
400 | } | |
401 |
|
401 | |||
402 | def write(self, tr=None): |
|
402 | def write(self, tr=None): | |
403 | if tr: |
|
403 | if tr: | |
404 | tr.addfilegenerator( |
|
404 | tr.addfilegenerator( | |
405 | b'histedit-state', |
|
405 | b'histedit-state', | |
406 | (b'histedit-state',), |
|
406 | (b'histedit-state',), | |
407 | self._write, |
|
407 | self._write, | |
408 | location=b'plain', |
|
408 | location=b'plain', | |
409 | ) |
|
409 | ) | |
410 | else: |
|
410 | else: | |
411 | with self.repo.vfs(b"histedit-state", b"w") as f: |
|
411 | with self.repo.vfs(b"histedit-state", b"w") as f: | |
412 | self._write(f) |
|
412 | self._write(f) | |
413 |
|
413 | |||
    def _write(self, fp):
        """Serialize the current state to *fp* in the 'v1' line format.

        Layout: version marker, parent node (hex), topmost node (hex),
        keep flag, action count followed by one line per action,
        replacement count followed by one line per replacement, and
        finally the backup-file path (empty line when unset).
        """
        fp.write(b'v1\n')
        fp.write(b'%s\n' % hex(self.parentctxnode))
        fp.write(b'%s\n' % hex(self.topmost))
        fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
        fp.write(b'%d\n' % len(self.actions))
        for action in self.actions:
            fp.write(b'%s\n' % action.tostate())
        fp.write(b'%d\n' % len(self.replacements))
        for replacement in self.replacements:
            # One line per replacement: the original node's hex followed by
            # the concatenated hex of every successor node.
            fp.write(
                b'%s%s\n'
                % (
                    hex(replacement[0]),
                    b''.join(hex(r) for r in replacement[1]),
                )
            )
        backupfile = self.backupfile
        if not backupfile:
            # Normalize None/empty to an empty line in the file.
            backupfile = b''
        fp.write(b'%s\n' % backupfile)
435 |
|
435 | |||
436 | def _load(self): |
|
436 | def _load(self): | |
437 | fp = self.repo.vfs(b'histedit-state', b'r') |
|
437 | fp = self.repo.vfs(b'histedit-state', b'r') | |
438 | lines = [l[:-1] for l in fp.readlines()] |
|
438 | lines = [l[:-1] for l in fp.readlines()] | |
439 |
|
439 | |||
440 | index = 0 |
|
440 | index = 0 | |
441 | lines[index] # version number |
|
441 | lines[index] # version number | |
442 | index += 1 |
|
442 | index += 1 | |
443 |
|
443 | |||
444 | parentctxnode = bin(lines[index]) |
|
444 | parentctxnode = bin(lines[index]) | |
445 | index += 1 |
|
445 | index += 1 | |
446 |
|
446 | |||
447 | topmost = bin(lines[index]) |
|
447 | topmost = bin(lines[index]) | |
448 | index += 1 |
|
448 | index += 1 | |
449 |
|
449 | |||
450 | keep = lines[index] == b'True' |
|
450 | keep = lines[index] == b'True' | |
451 | index += 1 |
|
451 | index += 1 | |
452 |
|
452 | |||
453 | # Rules |
|
453 | # Rules | |
454 | rules = [] |
|
454 | rules = [] | |
455 | rulelen = int(lines[index]) |
|
455 | rulelen = int(lines[index]) | |
456 | index += 1 |
|
456 | index += 1 | |
457 | for i in range(rulelen): |
|
457 | for i in range(rulelen): | |
458 | ruleaction = lines[index] |
|
458 | ruleaction = lines[index] | |
459 | index += 1 |
|
459 | index += 1 | |
460 | rule = lines[index] |
|
460 | rule = lines[index] | |
461 | index += 1 |
|
461 | index += 1 | |
462 | rules.append((ruleaction, rule)) |
|
462 | rules.append((ruleaction, rule)) | |
463 |
|
463 | |||
464 | # Replacements |
|
464 | # Replacements | |
465 | replacements = [] |
|
465 | replacements = [] | |
466 | replacementlen = int(lines[index]) |
|
466 | replacementlen = int(lines[index]) | |
467 | index += 1 |
|
467 | index += 1 | |
468 | for i in range(replacementlen): |
|
468 | for i in range(replacementlen): | |
469 | replacement = lines[index] |
|
469 | replacement = lines[index] | |
470 | original = bin(replacement[:40]) |
|
470 | original = bin(replacement[:40]) | |
471 | succ = [ |
|
471 | succ = [ | |
472 | bin(replacement[i : i + 40]) |
|
472 | bin(replacement[i : i + 40]) | |
473 | for i in range(40, len(replacement), 40) |
|
473 | for i in range(40, len(replacement), 40) | |
474 | ] |
|
474 | ] | |
475 | replacements.append((original, succ)) |
|
475 | replacements.append((original, succ)) | |
476 | index += 1 |
|
476 | index += 1 | |
477 |
|
477 | |||
478 | backupfile = lines[index] |
|
478 | backupfile = lines[index] | |
479 | index += 1 |
|
479 | index += 1 | |
480 |
|
480 | |||
481 | fp.close() |
|
481 | fp.close() | |
482 |
|
482 | |||
483 | return parentctxnode, rules, keep, topmost, replacements, backupfile |
|
483 | return parentctxnode, rules, keep, topmost, replacements, backupfile | |
484 |
|
484 | |||
485 | def clear(self): |
|
485 | def clear(self): | |
486 | if self.inprogress(): |
|
486 | if self.inprogress(): | |
487 | self.repo.vfs.unlink(b'histedit-state') |
|
487 | self.repo.vfs.unlink(b'histedit-state') | |
488 |
|
488 | |||
489 | def inprogress(self): |
|
489 | def inprogress(self): | |
490 | return self.repo.vfs.exists(b'histedit-state') |
|
490 | return self.repo.vfs.exists(b'histedit-state') | |
491 |
|
491 | |||
492 |
|
492 | |||
493 | class histeditaction: |
|
493 | class histeditaction: | |
494 | def __init__(self, state, node): |
|
494 | def __init__(self, state, node): | |
495 | self.state = state |
|
495 | self.state = state | |
496 | self.repo = state.repo |
|
496 | self.repo = state.repo | |
497 | self.node = node |
|
497 | self.node = node | |
498 |
|
498 | |||
499 | @classmethod |
|
499 | @classmethod | |
500 | def fromrule(cls, state, rule): |
|
500 | def fromrule(cls, state, rule): | |
501 | """Parses the given rule, returning an instance of the histeditaction.""" |
|
501 | """Parses the given rule, returning an instance of the histeditaction.""" | |
502 | ruleid = rule.strip().split(b' ', 1)[0] |
|
502 | ruleid = rule.strip().split(b' ', 1)[0] | |
503 | # ruleid can be anything from rev numbers, hashes, "bookmarks" etc |
|
503 | # ruleid can be anything from rev numbers, hashes, "bookmarks" etc | |
504 | # Check for validation of rule ids and get the rulehash |
|
504 | # Check for validation of rule ids and get the rulehash | |
505 | try: |
|
505 | try: | |
506 | rev = bin(ruleid) |
|
506 | rev = bin(ruleid) | |
507 | except binascii.Error: |
|
507 | except binascii.Error: | |
508 | try: |
|
508 | try: | |
509 | _ctx = scmutil.revsingle(state.repo, ruleid) |
|
509 | _ctx = scmutil.revsingle(state.repo, ruleid) | |
510 | rulehash = _ctx.hex() |
|
510 | rulehash = _ctx.hex() | |
511 | rev = bin(rulehash) |
|
511 | rev = bin(rulehash) | |
512 | except error.RepoLookupError: |
|
512 | except error.RepoLookupError: | |
513 | raise error.ParseError(_(b"invalid changeset %s") % ruleid) |
|
513 | raise error.ParseError(_(b"invalid changeset %s") % ruleid) | |
514 | return cls(state, rev) |
|
514 | return cls(state, rev) | |
515 |
|
515 | |||
516 | def verify(self, prev, expected, seen): |
|
516 | def verify(self, prev, expected, seen): | |
517 | """Verifies semantic correctness of the rule""" |
|
517 | """Verifies semantic correctness of the rule""" | |
518 | repo = self.repo |
|
518 | repo = self.repo | |
519 | ha = hex(self.node) |
|
519 | ha = hex(self.node) | |
520 | self.node = scmutil.resolvehexnodeidprefix(repo, ha) |
|
520 | self.node = scmutil.resolvehexnodeidprefix(repo, ha) | |
521 | if self.node is None: |
|
521 | if self.node is None: | |
522 | raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12]) |
|
522 | raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12]) | |
523 | self._verifynodeconstraints(prev, expected, seen) |
|
523 | self._verifynodeconstraints(prev, expected, seen) | |
524 |
|
524 | |||
525 | def _verifynodeconstraints(self, prev, expected, seen): |
|
525 | def _verifynodeconstraints(self, prev, expected, seen): | |
526 | # by default command need a node in the edited list |
|
526 | # by default command need a node in the edited list | |
527 | if self.node not in expected: |
|
527 | if self.node not in expected: | |
528 | raise error.ParseError( |
|
528 | raise error.ParseError( | |
529 | _(b'%s "%s" changeset was not a candidate') |
|
529 | _(b'%s "%s" changeset was not a candidate') | |
530 | % (self.verb, short(self.node)), |
|
530 | % (self.verb, short(self.node)), | |
531 | hint=_(b'only use listed changesets'), |
|
531 | hint=_(b'only use listed changesets'), | |
532 | ) |
|
532 | ) | |
533 | # and only one command per node |
|
533 | # and only one command per node | |
534 | if self.node in seen: |
|
534 | if self.node in seen: | |
535 | raise error.ParseError( |
|
535 | raise error.ParseError( | |
536 | _(b'duplicated command for changeset %s') % short(self.node) |
|
536 | _(b'duplicated command for changeset %s') % short(self.node) | |
537 | ) |
|
537 | ) | |
538 |
|
538 | |||
539 | def torule(self): |
|
539 | def torule(self): | |
540 | """build a histedit rule line for an action |
|
540 | """build a histedit rule line for an action | |
541 |
|
541 | |||
542 | by default lines are in the form: |
|
542 | by default lines are in the form: | |
543 | <hash> <rev> <summary> |
|
543 | <hash> <rev> <summary> | |
544 | """ |
|
544 | """ | |
545 | ctx = self.repo[self.node] |
|
545 | ctx = self.repo[self.node] | |
546 | ui = self.repo.ui |
|
546 | ui = self.repo.ui | |
547 | # We don't want color codes in the commit message template, so |
|
547 | # We don't want color codes in the commit message template, so | |
548 | # disable the label() template function while we render it. |
|
548 | # disable the label() template function while we render it. | |
549 | with ui.configoverride( |
|
549 | with ui.configoverride( | |
550 | {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit' |
|
550 | {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit' | |
551 | ): |
|
551 | ): | |
552 | summary = cmdutil.rendertemplate( |
|
552 | summary = cmdutil.rendertemplate( | |
553 | ctx, ui.config(b'histedit', b'summary-template') |
|
553 | ctx, ui.config(b'histedit', b'summary-template') | |
554 | ) |
|
554 | ) | |
555 | line = b'%s %s %s' % (self.verb, ctx, stringutil.firstline(summary)) |
|
555 | line = b'%s %s %s' % (self.verb, ctx, stringutil.firstline(summary)) | |
556 | # trim to 75 columns by default so it's not stupidly wide in my editor |
|
556 | # trim to 75 columns by default so it's not stupidly wide in my editor | |
557 | # (the 5 more are left for verb) |
|
557 | # (the 5 more are left for verb) | |
558 | maxlen = self.repo.ui.configint(b'histedit', b'linelen') |
|
558 | maxlen = self.repo.ui.configint(b'histedit', b'linelen') | |
559 | maxlen = max(maxlen, 22) # avoid truncating hash |
|
559 | maxlen = max(maxlen, 22) # avoid truncating hash | |
560 | return stringutil.ellipsis(line, maxlen) |
|
560 | return stringutil.ellipsis(line, maxlen) | |
561 |
|
561 | |||
562 | def tostate(self): |
|
562 | def tostate(self): | |
563 | """Print an action in format used by histedit state files |
|
563 | """Print an action in format used by histedit state files | |
564 | (the first line is a verb, the remainder is the second) |
|
564 | (the first line is a verb, the remainder is the second) | |
565 | """ |
|
565 | """ | |
566 | return b"%s\n%s" % (self.verb, hex(self.node)) |
|
566 | return b"%s\n%s" % (self.verb, hex(self.node)) | |
567 |
|
567 | |||
568 | def run(self): |
|
568 | def run(self): | |
569 | """Runs the action. The default behavior is simply apply the action's |
|
569 | """Runs the action. The default behavior is simply apply the action's | |
570 | rulectx onto the current parentctx.""" |
|
570 | rulectx onto the current parentctx.""" | |
571 | self.applychange() |
|
571 | self.applychange() | |
572 | self.continuedirty() |
|
572 | self.continuedirty() | |
573 | return self.continueclean() |
|
573 | return self.continueclean() | |
574 |
|
574 | |||
575 | def applychange(self): |
|
575 | def applychange(self): | |
576 | """Applies the changes from this action's rulectx onto the current |
|
576 | """Applies the changes from this action's rulectx onto the current | |
577 | parentctx, but does not commit them.""" |
|
577 | parentctx, but does not commit them.""" | |
578 | repo = self.repo |
|
578 | repo = self.repo | |
579 | rulectx = repo[self.node] |
|
579 | rulectx = repo[self.node] | |
580 | with repo.ui.silent(): |
|
580 | with repo.ui.silent(): | |
581 | hg.update(repo, self.state.parentctxnode, quietempty=True) |
|
581 | hg.update(repo, self.state.parentctxnode, quietempty=True) | |
582 | stats = applychanges(repo.ui, repo, rulectx, {}) |
|
582 | stats = applychanges(repo.ui, repo, rulectx, {}) | |
583 | repo.dirstate.setbranch(rulectx.branch(), repo.currenttransaction()) |
|
583 | repo.dirstate.setbranch(rulectx.branch(), repo.currenttransaction()) | |
584 | if stats.unresolvedcount: |
|
584 | if stats.unresolvedcount: | |
585 | raise error.InterventionRequired( |
|
585 | raise error.InterventionRequired( | |
586 | _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)), |
|
586 | _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)), | |
587 | hint=_(b'hg histedit --continue to resume'), |
|
587 | hint=_(b'hg histedit --continue to resume'), | |
588 | ) |
|
588 | ) | |
589 |
|
589 | |||
590 | def continuedirty(self): |
|
590 | def continuedirty(self): | |
591 | """Continues the action when changes have been applied to the working |
|
591 | """Continues the action when changes have been applied to the working | |
592 | copy. The default behavior is to commit the dirty changes.""" |
|
592 | copy. The default behavior is to commit the dirty changes.""" | |
593 | repo = self.repo |
|
593 | repo = self.repo | |
594 | rulectx = repo[self.node] |
|
594 | rulectx = repo[self.node] | |
595 |
|
595 | |||
596 | editor = self.commiteditor() |
|
596 | editor = self.commiteditor() | |
597 | commit = commitfuncfor(repo, rulectx) |
|
597 | commit = commitfuncfor(repo, rulectx) | |
598 | if repo.ui.configbool(b'rewrite', b'update-timestamp'): |
|
598 | if repo.ui.configbool(b'rewrite', b'update-timestamp'): | |
599 | date = dateutil.makedate() |
|
599 | date = dateutil.makedate() | |
600 | else: |
|
600 | else: | |
601 | date = rulectx.date() |
|
601 | date = rulectx.date() | |
602 | commit( |
|
602 | commit( | |
603 | text=rulectx.description(), |
|
603 | text=rulectx.description(), | |
604 | user=rulectx.user(), |
|
604 | user=rulectx.user(), | |
605 | date=date, |
|
605 | date=date, | |
606 | extra=rulectx.extra(), |
|
606 | extra=rulectx.extra(), | |
607 | editor=editor, |
|
607 | editor=editor, | |
608 | ) |
|
608 | ) | |
609 |
|
609 | |||
610 | def commiteditor(self): |
|
610 | def commiteditor(self): | |
611 | """The editor to be used to edit the commit message.""" |
|
611 | """The editor to be used to edit the commit message.""" | |
612 | return False |
|
612 | return False | |
613 |
|
613 | |||
614 | def continueclean(self): |
|
614 | def continueclean(self): | |
615 | """Continues the action when the working copy is clean. The default |
|
615 | """Continues the action when the working copy is clean. The default | |
616 | behavior is to accept the current commit as the new version of the |
|
616 | behavior is to accept the current commit as the new version of the | |
617 | rulectx.""" |
|
617 | rulectx.""" | |
618 | ctx = self.repo[b'.'] |
|
618 | ctx = self.repo[b'.'] | |
619 | if ctx.node() == self.state.parentctxnode: |
|
619 | if ctx.node() == self.state.parentctxnode: | |
620 | self.repo.ui.warn( |
|
620 | self.repo.ui.warn( | |
621 | _(b'%s: skipping changeset (no changes)\n') % short(self.node) |
|
621 | _(b'%s: skipping changeset (no changes)\n') % short(self.node) | |
622 | ) |
|
622 | ) | |
623 | return ctx, [(self.node, tuple())] |
|
623 | return ctx, [(self.node, tuple())] | |
624 | if ctx.node() == self.node: |
|
624 | if ctx.node() == self.node: | |
625 | # Nothing changed |
|
625 | # Nothing changed | |
626 | return ctx, [] |
|
626 | return ctx, [] | |
627 | return ctx, [(self.node, (ctx.node(),))] |
|
627 | return ctx, [(self.node, (ctx.node(),))] | |
628 |
|
628 | |||
629 |
|
629 | |||
630 | def commitfuncfor(repo, src): |
|
630 | def commitfuncfor(repo, src): | |
631 | """Build a commit function for the replacement of <src> |
|
631 | """Build a commit function for the replacement of <src> | |
632 |
|
632 | |||
633 | This function ensure we apply the same treatment to all changesets. |
|
633 | This function ensure we apply the same treatment to all changesets. | |
634 |
|
634 | |||
635 | - Add a 'histedit_source' entry in extra. |
|
635 | - Add a 'histedit_source' entry in extra. | |
636 |
|
636 | |||
637 | Note that fold has its own separated logic because its handling is a bit |
|
637 | Note that fold has its own separated logic because its handling is a bit | |
638 | different and not easily factored out of the fold method. |
|
638 | different and not easily factored out of the fold method. | |
639 | """ |
|
639 | """ | |
640 | phasemin = src.phase() |
|
640 | phasemin = src.phase() | |
641 |
|
641 | |||
642 | def commitfunc(**kwargs): |
|
642 | def commitfunc(**kwargs): | |
643 | overrides = {(b'phases', b'new-commit'): phasemin} |
|
643 | overrides = {(b'phases', b'new-commit'): phasemin} | |
644 | with repo.ui.configoverride(overrides, b'histedit'): |
|
644 | with repo.ui.configoverride(overrides, b'histedit'): | |
645 | extra = kwargs.get('extra', {}).copy() |
|
645 | extra = kwargs.get('extra', {}).copy() | |
646 | extra[b'histedit_source'] = src.hex() |
|
646 | extra[b'histedit_source'] = src.hex() | |
647 | kwargs['extra'] = extra |
|
647 | kwargs['extra'] = extra | |
648 | return repo.commit(**kwargs) |
|
648 | return repo.commit(**kwargs) | |
649 |
|
649 | |||
650 | return commitfunc |
|
650 | return commitfunc | |
651 |
|
651 | |||
652 |
|
652 | |||
653 | def applychanges(ui, repo, ctx, opts): |
|
653 | def applychanges(ui, repo, ctx, opts): | |
654 | """Merge changeset from ctx (only) in the current working directory""" |
|
654 | """Merge changeset from ctx (only) in the current working directory""" | |
655 | if ctx.p1().node() == repo.dirstate.p1(): |
|
655 | if ctx.p1().node() == repo.dirstate.p1(): | |
656 | # edits are "in place" we do not need to make any merge, |
|
656 | # edits are "in place" we do not need to make any merge, | |
657 | # just applies changes on parent for editing |
|
657 | # just applies changes on parent for editing | |
658 | with ui.silent(): |
|
658 | with ui.silent(): | |
659 | cmdutil.revert(ui, repo, ctx, all=True) |
|
659 | cmdutil.revert(ui, repo, ctx, all=True) | |
660 | stats = mergemod.updateresult(0, 0, 0, 0) |
|
660 | stats = mergemod.updateresult(0, 0, 0, 0) | |
661 | else: |
|
661 | else: | |
662 | try: |
|
662 | try: | |
663 | # ui.forcemerge is an internal variable, do not document |
|
663 | # ui.forcemerge is an internal variable, do not document | |
664 | repo.ui.setconfig( |
|
664 | repo.ui.setconfig( | |
665 | b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit' |
|
665 | b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit' | |
666 | ) |
|
666 | ) | |
667 | stats = mergemod.graft( |
|
667 | stats = mergemod.graft( | |
668 | repo, |
|
668 | repo, | |
669 | ctx, |
|
669 | ctx, | |
670 | labels=[ |
|
670 | labels=[ | |
671 | b'already edited', |
|
671 | b'already edited', | |
672 | b'current change', |
|
672 | b'current change', | |
673 | b'parent of current change', |
|
673 | b'parent of current change', | |
674 | ], |
|
674 | ], | |
675 | ) |
|
675 | ) | |
676 | finally: |
|
676 | finally: | |
677 | repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit') |
|
677 | repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit') | |
678 | return stats |
|
678 | return stats | |
679 |
|
679 | |||
680 |
|
680 | |||
681 | def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False): |
|
681 | def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False): | |
682 | """collapse the set of revisions from first to last as new one. |
|
682 | """collapse the set of revisions from first to last as new one. | |
683 |
|
683 | |||
684 | Expected commit options are: |
|
684 | Expected commit options are: | |
685 | - message |
|
685 | - message | |
686 | - date |
|
686 | - date | |
687 | - username |
|
687 | - username | |
688 | Commit message is edited in all cases. |
|
688 | Commit message is edited in all cases. | |
689 |
|
689 | |||
690 | This function works in memory.""" |
|
690 | This function works in memory.""" | |
691 | ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev())) |
|
691 | ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev())) | |
692 | if not ctxs: |
|
692 | if not ctxs: | |
693 | return None |
|
693 | return None | |
694 | for c in ctxs: |
|
694 | for c in ctxs: | |
695 | if not c.mutable(): |
|
695 | if not c.mutable(): | |
696 | raise error.ParseError( |
|
696 | raise error.ParseError( | |
697 | _(b"cannot fold into public change %s") % short(c.node()) |
|
697 | _(b"cannot fold into public change %s") % short(c.node()) | |
698 | ) |
|
698 | ) | |
699 | base = firstctx.p1() |
|
699 | base = firstctx.p1() | |
700 |
|
700 | |||
701 | # commit a new version of the old changeset, including the update |
|
701 | # commit a new version of the old changeset, including the update | |
702 | # collect all files which might be affected |
|
702 | # collect all files which might be affected | |
703 | files = set() |
|
703 | files = set() | |
704 | for ctx in ctxs: |
|
704 | for ctx in ctxs: | |
705 | files.update(ctx.files()) |
|
705 | files.update(ctx.files()) | |
706 |
|
706 | |||
707 | # Recompute copies (avoid recording a -> b -> a) |
|
707 | # Recompute copies (avoid recording a -> b -> a) | |
708 | copied = copies.pathcopies(base, lastctx) |
|
708 | copied = copies.pathcopies(base, lastctx) | |
709 |
|
709 | |||
710 | # prune files which were reverted by the updates |
|
710 | # prune files which were reverted by the updates | |
711 | files = [f for f in files if not cmdutil.samefile(f, lastctx, base)] |
|
711 | files = [f for f in files if not cmdutil.samefile(f, lastctx, base)] | |
712 | # commit version of these files as defined by head |
|
712 | # commit version of these files as defined by head | |
713 | headmf = lastctx.manifest() |
|
713 | headmf = lastctx.manifest() | |
714 |
|
714 | |||
715 | def filectxfn(repo, ctx, path): |
|
715 | def filectxfn(repo, ctx, path): | |
716 | if path in headmf: |
|
716 | if path in headmf: | |
717 | fctx = lastctx[path] |
|
717 | fctx = lastctx[path] | |
718 | flags = fctx.flags() |
|
718 | flags = fctx.flags() | |
719 | mctx = context.memfilectx( |
|
719 | mctx = context.memfilectx( | |
720 | repo, |
|
720 | repo, | |
721 | ctx, |
|
721 | ctx, | |
722 | fctx.path(), |
|
722 | fctx.path(), | |
723 | fctx.data(), |
|
723 | fctx.data(), | |
724 | islink=b'l' in flags, |
|
724 | islink=b'l' in flags, | |
725 | isexec=b'x' in flags, |
|
725 | isexec=b'x' in flags, | |
726 | copysource=copied.get(path), |
|
726 | copysource=copied.get(path), | |
727 | ) |
|
727 | ) | |
728 | return mctx |
|
728 | return mctx | |
729 | return None |
|
729 | return None | |
730 |
|
730 | |||
731 | if commitopts.get(b'message'): |
|
731 | if commitopts.get(b'message'): | |
732 | message = commitopts[b'message'] |
|
732 | message = commitopts[b'message'] | |
733 | else: |
|
733 | else: | |
734 | message = firstctx.description() |
|
734 | message = firstctx.description() | |
735 | user = commitopts.get(b'user') |
|
735 | user = commitopts.get(b'user') | |
736 | date = commitopts.get(b'date') |
|
736 | date = commitopts.get(b'date') | |
737 | extra = commitopts.get(b'extra') |
|
737 | extra = commitopts.get(b'extra') | |
738 |
|
738 | |||
739 | parents = (firstctx.p1().node(), firstctx.p2().node()) |
|
739 | parents = (firstctx.p1().node(), firstctx.p2().node()) | |
740 | editor = None |
|
740 | editor = None | |
741 | if not skipprompt: |
|
741 | if not skipprompt: | |
742 | editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold') |
|
742 | editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold') | |
743 | new = context.memctx( |
|
743 | new = context.memctx( | |
744 | repo, |
|
744 | repo, | |
745 | parents=parents, |
|
745 | parents=parents, | |
746 | text=message, |
|
746 | text=message, | |
747 | files=files, |
|
747 | files=files, | |
748 | filectxfn=filectxfn, |
|
748 | filectxfn=filectxfn, | |
749 | user=user, |
|
749 | user=user, | |
750 | date=date, |
|
750 | date=date, | |
751 | extra=extra, |
|
751 | extra=extra, | |
752 | editor=editor, |
|
752 | editor=editor, | |
753 | ) |
|
753 | ) | |
754 | return repo.commitctx(new) |
|
754 | return repo.commitctx(new) | |
755 |
|
755 | |||
756 |
|
756 | |||
757 | def _isdirtywc(repo): |
|
757 | def _isdirtywc(repo): | |
758 | return repo[None].dirty(missing=True) |
|
758 | return repo[None].dirty(missing=True) | |
759 |
|
759 | |||
760 |
|
760 | |||
761 | def abortdirty(): |
|
761 | def abortdirty(): | |
762 | raise error.StateError( |
|
762 | raise error.StateError( | |
763 | _(b'working copy has pending changes'), |
|
763 | _(b'working copy has pending changes'), | |
764 | hint=_( |
|
764 | hint=_( | |
765 | b'amend, commit, or revert them and run histedit ' |
|
765 | b'amend, commit, or revert them and run histedit ' | |
766 | b'--continue, or abort with histedit --abort' |
|
766 | b'--continue, or abort with histedit --abort' | |
767 | ), |
|
767 | ), | |
768 | ) |
|
768 | ) | |
769 |
|
769 | |||
770 |
|
770 | |||
771 | def action(verbs, message, priority=False, internal=False): |
|
771 | def action(verbs, message, priority=False, internal=False): | |
772 | def wrap(cls): |
|
772 | def wrap(cls): | |
773 | assert not priority or not internal |
|
773 | assert not priority or not internal | |
774 | verb = verbs[0] |
|
774 | verb = verbs[0] | |
775 | if priority: |
|
775 | if priority: | |
776 | primaryactions.add(verb) |
|
776 | primaryactions.add(verb) | |
777 | elif internal: |
|
777 | elif internal: | |
778 | internalactions.add(verb) |
|
778 | internalactions.add(verb) | |
779 | elif len(verbs) > 1: |
|
779 | elif len(verbs) > 1: | |
780 | secondaryactions.add(verb) |
|
780 | secondaryactions.add(verb) | |
781 | else: |
|
781 | else: | |
782 | tertiaryactions.add(verb) |
|
782 | tertiaryactions.add(verb) | |
783 |
|
783 | |||
784 | cls.verb = verb |
|
784 | cls.verb = verb | |
785 | cls.verbs = verbs |
|
785 | cls.verbs = verbs | |
786 | cls.message = message |
|
786 | cls.message = message | |
787 | for verb in verbs: |
|
787 | for verb in verbs: | |
788 | actiontable[verb] = cls |
|
788 | actiontable[verb] = cls | |
789 | return cls |
|
789 | return cls | |
790 |
|
790 | |||
791 | return wrap |
|
791 | return wrap | |
792 |
|
792 | |||
793 |
|
793 | |||
794 | @action([b'pick', b'p'], _(b'use commit'), priority=True) |
|
794 | @action([b'pick', b'p'], _(b'use commit'), priority=True) | |
795 | class pick(histeditaction): |
|
795 | class pick(histeditaction): | |
796 | def run(self): |
|
796 | def run(self): | |
797 | rulectx = self.repo[self.node] |
|
797 | rulectx = self.repo[self.node] | |
798 | if rulectx.p1().node() == self.state.parentctxnode: |
|
798 | if rulectx.p1().node() == self.state.parentctxnode: | |
799 | self.repo.ui.debug(b'node %s unchanged\n' % short(self.node)) |
|
799 | self.repo.ui.debug(b'node %s unchanged\n' % short(self.node)) | |
800 | return rulectx, [] |
|
800 | return rulectx, [] | |
801 |
|
801 | |||
802 | return super(pick, self).run() |
|
802 | return super(pick, self).run() | |
803 |
|
803 | |||
804 |
|
804 | |||
805 | @action( |
|
805 | @action( | |
806 | [b'edit', b'e'], |
|
806 | [b'edit', b'e'], | |
807 | _(b'use commit, but allow edits before making new commit'), |
|
807 | _(b'use commit, but allow edits before making new commit'), | |
808 | priority=True, |
|
808 | priority=True, | |
809 | ) |
|
809 | ) | |
810 | class edit(histeditaction): |
|
810 | class edit(histeditaction): | |
811 | def run(self): |
|
811 | def run(self): | |
812 | repo = self.repo |
|
812 | repo = self.repo | |
813 | rulectx = repo[self.node] |
|
813 | rulectx = repo[self.node] | |
814 | hg.update(repo, self.state.parentctxnode, quietempty=True) |
|
814 | hg.update(repo, self.state.parentctxnode, quietempty=True) | |
815 | applychanges(repo.ui, repo, rulectx, {}) |
|
815 | applychanges(repo.ui, repo, rulectx, {}) | |
816 | hint = _(b'to edit %s, `hg histedit --continue` after making changes') |
|
816 | hint = _(b'to edit %s, `hg histedit --continue` after making changes') | |
817 | raise error.InterventionRequired( |
|
817 | raise error.InterventionRequired( | |
818 | _(b'Editing (%s), commit as needed now to split the change') |
|
818 | _(b'Editing (%s), commit as needed now to split the change') | |
819 | % short(self.node), |
|
819 | % short(self.node), | |
820 | hint=hint % short(self.node), |
|
820 | hint=hint % short(self.node), | |
821 | ) |
|
821 | ) | |
822 |
|
822 | |||
823 | def commiteditor(self): |
|
823 | def commiteditor(self): | |
824 | return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit') |
|
824 | return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit') | |
825 |
|
825 | |||
826 |
|
826 | |||
827 | @action([b'fold', b'f'], _(b'use commit, but combine it with the one above')) |
|
827 | @action([b'fold', b'f'], _(b'use commit, but combine it with the one above')) | |
828 | class fold(histeditaction): |
|
828 | class fold(histeditaction): | |
829 | def verify(self, prev, expected, seen): |
|
829 | def verify(self, prev, expected, seen): | |
830 | """Verifies semantic correctness of the fold rule""" |
|
830 | """Verifies semantic correctness of the fold rule""" | |
831 | super(fold, self).verify(prev, expected, seen) |
|
831 | super(fold, self).verify(prev, expected, seen) | |
832 | repo = self.repo |
|
832 | repo = self.repo | |
833 | if not prev: |
|
833 | if not prev: | |
834 | c = repo[self.node].p1() |
|
834 | c = repo[self.node].p1() | |
835 | elif not prev.verb in (b'pick', b'base'): |
|
835 | elif not prev.verb in (b'pick', b'base'): | |
836 | return |
|
836 | return | |
837 | else: |
|
837 | else: | |
838 | c = repo[prev.node] |
|
838 | c = repo[prev.node] | |
839 | if not c.mutable(): |
|
839 | if not c.mutable(): | |
840 | raise error.ParseError( |
|
840 | raise error.ParseError( | |
841 | _(b"cannot fold into public change %s") % short(c.node()) |
|
841 | _(b"cannot fold into public change %s") % short(c.node()) | |
842 | ) |
|
842 | ) | |
843 |
|
843 | |||
844 | def continuedirty(self): |
|
844 | def continuedirty(self): | |
845 | repo = self.repo |
|
845 | repo = self.repo | |
846 | rulectx = repo[self.node] |
|
846 | rulectx = repo[self.node] | |
847 |
|
847 | |||
848 | commit = commitfuncfor(repo, rulectx) |
|
848 | commit = commitfuncfor(repo, rulectx) | |
849 | commit( |
|
849 | commit( | |
850 | text=b'fold-temp-revision %s' % short(self.node), |
|
850 | text=b'fold-temp-revision %s' % short(self.node), | |
851 | user=rulectx.user(), |
|
851 | user=rulectx.user(), | |
852 | date=rulectx.date(), |
|
852 | date=rulectx.date(), | |
853 | extra=rulectx.extra(), |
|
853 | extra=rulectx.extra(), | |
854 | ) |
|
854 | ) | |
855 |
|
855 | |||
856 | def continueclean(self): |
|
856 | def continueclean(self): | |
857 | repo = self.repo |
|
857 | repo = self.repo | |
858 | ctx = repo[b'.'] |
|
858 | ctx = repo[b'.'] | |
859 | rulectx = repo[self.node] |
|
859 | rulectx = repo[self.node] | |
860 | parentctxnode = self.state.parentctxnode |
|
860 | parentctxnode = self.state.parentctxnode | |
861 | if ctx.node() == parentctxnode: |
|
861 | if ctx.node() == parentctxnode: | |
862 | repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node)) |
|
862 | repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node)) | |
863 | return ctx, [(self.node, (parentctxnode,))] |
|
863 | return ctx, [(self.node, (parentctxnode,))] | |
864 |
|
864 | |||
865 | parentctx = repo[parentctxnode] |
|
865 | parentctx = repo[parentctxnode] | |
866 | newcommits = { |
|
866 | newcommits = { | |
867 | c.node() |
|
867 | c.node() | |
868 | for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev()) |
|
868 | for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev()) | |
869 | } |
|
869 | } | |
870 | if not newcommits: |
|
870 | if not newcommits: | |
871 | repo.ui.warn( |
|
871 | repo.ui.warn( | |
872 | _( |
|
872 | _( | |
873 | b'%s: cannot fold - working copy is not a ' |
|
873 | b'%s: cannot fold - working copy is not a ' | |
874 | b'descendant of previous commit %s\n' |
|
874 | b'descendant of previous commit %s\n' | |
875 | ) |
|
875 | ) | |
876 | % (short(self.node), short(parentctxnode)) |
|
876 | % (short(self.node), short(parentctxnode)) | |
877 | ) |
|
877 | ) | |
878 | return ctx, [(self.node, (ctx.node(),))] |
|
878 | return ctx, [(self.node, (ctx.node(),))] | |
879 |
|
879 | |||
880 | middlecommits = newcommits.copy() |
|
880 | middlecommits = newcommits.copy() | |
881 | middlecommits.discard(ctx.node()) |
|
881 | middlecommits.discard(ctx.node()) | |
882 |
|
882 | |||
883 | return self.finishfold( |
|
883 | return self.finishfold( | |
884 | repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits |
|
884 | repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits | |
885 | ) |
|
885 | ) | |
886 |
|
886 | |||
887 | def skipprompt(self): |
|
887 | def skipprompt(self): | |
888 | """Returns true if the rule should skip the message editor. |
|
888 | """Returns true if the rule should skip the message editor. | |
889 |
|
889 | |||
890 | For example, 'fold' wants to show an editor, but 'rollup' |
|
890 | For example, 'fold' wants to show an editor, but 'rollup' | |
891 | doesn't want to. |
|
891 | doesn't want to. | |
892 | """ |
|
892 | """ | |
893 | return False |
|
893 | return False | |
894 |
|
894 | |||
895 | def mergedescs(self): |
|
895 | def mergedescs(self): | |
896 | """Returns true if the rule should merge messages of multiple changes. |
|
896 | """Returns true if the rule should merge messages of multiple changes. | |
897 |
|
897 | |||
898 | This exists mainly so that 'rollup' rules can be a subclass of |
|
898 | This exists mainly so that 'rollup' rules can be a subclass of | |
899 | 'fold'. |
|
899 | 'fold'. | |
900 | """ |
|
900 | """ | |
901 | return True |
|
901 | return True | |
902 |
|
902 | |||
def firstdate(self):
    """Whether the date of the first change should be preserved.

    Exists mainly so that 'rollup' can be implemented as a subclass of
    'fold' that opts in.
    """
    return False
911 |
|
911 | |||
def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
    """Collapse the temporary result ``newnode`` into a single changeset.

    ``ctx`` is the fold target (the commit being folded into), ``oldctx``
    the changeset being folded, and ``internalchanges`` any intermediate
    commits created along the way.  Returns ``(newctx, replacements)``
    where replacements maps old nodes to their successors.
    """
    # Move the working copy to the target's parent before collapsing.
    mergemod.update(ctx.p1())
    ### prepare new commit data
    commitopts = {}
    commitopts[b'user'] = ctx.user()
    # commit message: either keep the target's message (rollup) or join
    # all descriptions with a '***' separator (fold).
    if not self.mergedescs():
        newmessage = ctx.description()
    else:
        newmessage = (
            b'\n***\n'.join(
                [ctx.description()]
                + [repo[r].description() for r in internalchanges]
                + [oldctx.description()]
            )
            + b'\n'
        )
    commitopts[b'message'] = newmessage
    # date
    if self.firstdate():
        commitopts[b'date'] = ctx.date()
    else:
        # (timestamp, tzoffset) tuples compare by timestamp first, so
        # this keeps the later of the two dates.
        commitopts[b'date'] = max(ctx.date(), oldctx.date())
    # if date is to be updated to current
    if ui.configbool(b'rewrite', b'update-timestamp'):
        commitopts[b'date'] = dateutil.makedate()

    extra = ctx.extra().copy()
    # histedit_source
    # note: ctx is likely a temporary commit but that the best we can do
    # here. This is sufficient to solve issue3681 anyway.
    extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
    commitopts[b'extra'] = extra
    # Commit with the higher of the two phases so the fold never makes a
    # secret change public.
    phasemin = max(ctx.phase(), oldctx.phase())
    overrides = {(b'phases', b'new-commit'): phasemin}
    with repo.ui.configoverride(overrides, b'histedit'):
        n = collapse(
            repo,
            ctx,
            repo[newnode],
            commitopts,
            skipprompt=self.skipprompt(),
        )
    # collapse() returns None when the user aborted the commit; nothing
    # was rewritten in that case.
    if n is None:
        return ctx, []
    mergemod.update(repo[n])
    # All three intermediate nodes are superseded by the collapsed one.
    replacements = [
        (oldctx.node(), (newnode,)),
        (ctx.node(), (n,)),
        (newnode, (n,)),
    ]
    for ich in internalchanges:
        replacements.append((ich, (n,)))
    return repo[n], replacements
966 |
|
966 | |||
967 |
|
967 | |||
@action(
    [b'base', b'b'],
    _(b'checkout changeset and apply further changesets from there'),
)
class base(histeditaction):
    def run(self):
        # Only move the working copy when we are not already on the
        # requested changeset.
        if self.repo[b'.'].node() != self.node:
            mergemod.clean_update(self.repo[self.node])
        return self.continueclean()

    def continuedirty(self):
        abortdirty()

    def continueclean(self):
        # The new parent is simply whatever the working copy sits on;
        # 'base' rewrites nothing itself.
        return self.repo[b'.'], []

    def _verifynodeconstraints(self, prev, expected, seen):
        # base can only be use with a node not in the edited set
        if self.node in expected:
            msg = _(b'%s "%s" changeset was an edited list candidate')
            raise error.ParseError(
                msg % (self.verb, short(self.node)),
                hint=_(b'base must only use unlisted changesets'),
            )
993 |
|
993 | |||
994 |
|
994 | |||
@action(
    [b'_multifold'],
    _(
        b"""fold subclass used for when multiple folds happen in a row

    We only want to fire the editor for the folded message once when
    (say) four changes are folded down into a single change. This is
    similar to rollup, but we should preserve both messages so that
    when the last fold operation runs we can show the user all the
    commit messages in their editor.
    """
    ),
    internal=True,
)
class _multifold(fold):
    def skipprompt(self):
        # Defer the message editor to the final fold of the run.
        return True
1012 |
|
1012 | |||
1013 |
|
1013 | |||
@action(
    [b"roll", b"r"],
    _(b"like fold, but discard this commit's description and date"),
)
class rollup(fold):
    def mergedescs(self):
        # Discard this commit's message; only the fold target's survives.
        return False

    def skipprompt(self):
        # No editor needed: the surviving message is unchanged.
        return True

    def firstdate(self):
        # Keep the fold target's original date.
        return True
1027 |
|
1027 | |||
1028 |
|
1028 | |||
@action([b"drop", b"d"], _(b'remove commit from history'))
class drop(histeditaction):
    def run(self):
        # Dropping means: stay on the current parent and map the dropped
        # node to an empty successor tuple (no replacement).
        return self.repo[self.state.parentctxnode], [(self.node, ())]
1034 |
|
1034 | |||
1035 |
|
1035 | |||
@action(
    [b"mess", b"m"],
    _(b'edit commit message without changing commit content'),
    priority=True,
)
class message(histeditaction):
    def commiteditor(self):
        # Always open an editor so the user can rewrite the description.
        return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1044 |
|
1044 | |||
1045 |
|
1045 | |||
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code

    Raises StateError when there are no outgoing changesets or when the
    outgoing set has more than one root (ambiguous starting point).
    """
    if opts is None:
        opts = {}
    path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote)

    ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))

    # Honor a branch embedded in the path (e.g. 'url#branch').
    revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None)
    other = hg.peer(repo, opts, path)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.StateError(_(b'no outgoing ancestors'))
    # histedit needs a single linear starting point; multiple roots of
    # the outgoing set make the choice ambiguous.
    roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
    if len(roots) > 1:
        msg = _(b'there are ambiguous outgoing revisions')
        hint = _(b"see 'hg help histedit' for more detail")
        raise error.StateError(msg, hint=hint)
    return repo[roots[0]].node()
1071 |
|
1071 | |||
1072 |
|
1072 | |||
# Curses Support
# curses is optional (e.g. unavailable on some Windows builds); callers
# must check for None before entering the curses interface.
try:
    import curses
except ImportError:
    curses = None

# Actions cycled through by the next/prev-action keys, in cycle order.
KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
# Display labels; the caret marks actions that combine with the
# changeset shown above them.
ACTION_LABELS = {
    b'fold': b'^fold',
    b'roll': b'^roll',
}

# curses color-pair identifiers.
COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11

# Event identifiers produced by the key dispatcher.
E_QUIT, E_HISTEDIT = 1, 2
E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
# UI modes: rule list, patch view, and help screen.
MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3

# Keystroke -> command name, per mode; b'global' applies in every mode.
KEYTABLE = {
    b'global': {
        # NOTE(review): 'h' -> next-action and 'l' -> prev-action looks
        # swapped relative to vi-style left/right convention — confirm
        # this matches the intended cycling direction.
        b'h': b'next-action',
        b'KEY_RIGHT': b'next-action',
        b'l': b'prev-action',
        b'KEY_LEFT': b'prev-action',
        b'q': b'quit',
        b'c': b'histedit',
        b'C': b'histedit',
        b'v': b'showpatch',
        b'?': b'help',
    },
    MODE_RULES: {
        b'd': b'action-drop',
        b'e': b'action-edit',
        b'f': b'action-fold',
        b'm': b'action-mess',
        b'p': b'action-pick',
        b'r': b'action-roll',
        b' ': b'select',
        b'j': b'down',
        b'k': b'up',
        b'KEY_DOWN': b'down',
        b'KEY_UP': b'up',
        b'J': b'move-down',
        b'K': b'move-up',
        b'KEY_NPAGE': b'move-down',
        b'KEY_PPAGE': b'move-up',
        b'0': b'goto',  # Used for 0..9
    },
    MODE_PATCH: {
        b' ': b'page-down',
        b'KEY_NPAGE': b'page-down',
        b'KEY_PPAGE': b'page-up',
        b'j': b'line-down',
        b'k': b'line-up',
        b'KEY_DOWN': b'line-down',
        b'KEY_UP': b'line-up',
        b'J': b'down',
        b'K': b'up',
    },
    MODE_HELP: {},
}
1136 |
|
1136 | |||
1137 |
|
1137 | |||
def screen_size():
    """Return the terminal's (rows, cols) as reported by TIOCGWINSZ on stdout."""
    winsize = fcntl.ioctl(1, termios.TIOCGWINSZ, b'    ')
    return struct.unpack(b'hh', winsize)
1140 |
|
1140 | |||
1141 |
|
1141 | |||
class histeditrule:
    """One row of the curses rule list: a changeset plus its chosen action."""

    def __init__(self, ui, ctx, pos, action=b'pick'):
        self.ui = ui
        self.ctx = ctx
        self.action = action
        self.origpos = pos
        self.pos = pos
        self.conflicts = []

    def __bytes__(self):
        # Example display of several histeditrules:
        #
        #  #10 pick   316392:06a16c25c053   add option to skip tests
        #  #11 ^roll  316393:71313c964cc5   <RED>oops a fixup commit</RED>
        #  #12 pick   316394:ab31f3973b0d   include mfbt for mozilla-config.h
        #  #13 ^fold  316395:14ce5803f4c3   fix warnings
        #
        # The carets point to the changeset being folded into ("roll this
        # changeset into the changeset above").
        return b'%s%s' % (self.prefix, self.desc)

    __str__ = encoding.strmethod(__bytes__)

    @property
    def prefix(self):
        # 'fold' and 'roll' combine a patch with the previous one, so
        # their labels carry a caret marker pointing upward.
        label = ACTION_LABELS.get(self.action, self.action)

        shorthash = self.ctx.hex()[0:12]
        rev = self.ctx.rev()

        return b"#%s %s %d:%s " % (
            (b'%d' % self.origpos).ljust(2),
            label.ljust(6),
            rev,
            shorthash,
        )

    @util.propertycache
    def desc(self):
        template = self.ui.config(b'histedit', b'summary-template')
        summary = cmdutil.rendertemplate(self.ctx, template)
        if summary:
            return summary
        # Kept separate from the prefix so that a 'roll' description can
        # be rendered red on its own (it will get discarded).
        return stringutil.firstline(self.ctx.description())

    def checkconflicts(self, other):
        """Track whether *other* now conflicts with this rule; return the list."""
        if other.pos > self.pos and other.origpos <= self.origpos:
            # *other* was moved past us: a shared file means a conflict.
            if set(other.ctx.files()) & set(self.ctx.files()):
                self.conflicts.append(other)
        elif other in self.conflicts:
            # No longer reordered across us; clear any stale conflict.
            self.conflicts.remove(other)
        return self.conflicts
1203 |
|
1203 | |||
1204 |
|
1204 | |||
def makecommands(rules):
    """Returns a list of commands consumable by histedit --commands based on
    our list of rules.

    Each entry is a line of the form b'<action> <ctx>\\n'.
    """
    # Fix: the original wrote 'for rules in rules', shadowing the
    # parameter with the loop variable.  It happened to work, but it was
    # misleading and left the name bound to the last rule after the loop.
    return [b'%s %s\n' % (rule.action, rule.ctx) for rule in rules]
1212 |
|
1212 | |||
1213 |
|
1213 | |||
1214 | def addln(win, y, x, line, color=None): |
|
1214 | def addln(win, y, x, line, color=None): | |
1215 | """Add a line to the given window left padding but 100% filled with |
|
1215 | """Add a line to the given window left padding but 100% filled with | |
1216 | whitespace characters, so that the color appears on the whole line""" |
|
1216 | whitespace characters, so that the color appears on the whole line""" | |
1217 | maxy, maxx = win.getmaxyx() |
|
1217 | maxy, maxx = win.getmaxyx() | |
1218 | length = maxx - 1 - x |
|
1218 | length = maxx - 1 - x | |
1219 | line = bytes(line).ljust(length)[:length] |
|
1219 | line = bytes(line).ljust(length)[:length] | |
1220 | if y < 0: |
|
1220 | if y < 0: | |
1221 | y = maxy + y |
|
1221 | y = maxy + y | |
1222 | if x < 0: |
|
1222 | if x < 0: | |
1223 | x = maxx + x |
|
1223 | x = maxx + x | |
1224 | if color: |
|
1224 | if color: | |
1225 | win.addstr(y, x, line, color) |
|
1225 | win.addstr(y, x, line, color) | |
1226 | else: |
|
1226 | else: | |
1227 | win.addstr(y, x, line) |
|
1227 | win.addstr(y, x, line) | |
1228 |
|
1228 | |||
1229 |
|
1229 | |||
def _trunc_head(line, n):
    """Truncate *line* to *n* bytes by dropping its head, marked with '> '."""
    if len(line) > n:
        return b'> ' + line[-(n - 2) :]
    return line
1234 |
|
1234 | |||
1235 |
|
1235 | |||
def _trunc_tail(line, n):
    """Truncate *line* to *n* bytes by dropping its tail, marked with ' >'."""
    if len(line) > n:
        return line[: n - 2] + b' >'
    return line
1240 |
|
1240 | |||
1241 |
|
1241 | |||
1242 | class _chistedit_state: |
|
1242 | class _chistedit_state: | |
def __init__(self, repo, rules, stdscr):
    self.repo = repo
    self.rules = rules
    self.stdscr = stdscr
    # Display orientation is configurable: newest commit on top or bottom.
    self.later_on_top = repo.ui.configbool(
        b'histedit', b'later-commits-first'
    )
    # The current item in display order, initialized to point to the top
    # of the screen.
    self.pos = 0
    self.selected = None
    self.mode = (MODE_INIT, MODE_INIT)
    self.page_height = None
    # Per-mode scroll offsets.
    self.modes = {
        MODE_RULES: {b'line_offset': 0},
        MODE_PATCH: {b'line_offset': 0},
    }
1269 |
|
1269 | |||
def render_commit(self, win):
    """Renders the commit window that shows the log of the current selected
    commit"""
    rule = self.rules[self.display_pos_to_rule_pos(self.pos)]

    ctx = rule.ctx
    win.box()

    maxy, maxx = win.getmaxyx()
    # Leave room for the box border and a margin; all lines are clipped
    # to this width.
    length = maxx - 3

    line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
    win.addstr(1, 1, line[:length])

    line = b"user: %s" % ctx.user()
    win.addstr(2, 1, line[:length])

    bms = self.repo.nodebookmarks(ctx.node())
    line = b"bookmark: %s" % b' '.join(bms)
    win.addstr(3, 1, line[:length])

    line = b"summary: %s" % stringutil.firstline(ctx.description())
    win.addstr(4, 1, line[:length])

    # File list starts on the 'files:' row and continues one file per
    # line, aligned under the label.
    line = b"files: "
    win.addstr(5, 1, line)
    fnx = 1 + len(line)          # column where filenames begin
    fnmaxx = length - fnx + 1    # width available for a filename
    y = 5
    # Rows available for filenames, reserving the last row for the
    # conflict summary below (presumably; confirm against layout()).
    fnmaxn = maxy - (1 + y) - 1
    files = ctx.files()
    for i, line1 in enumerate(files):
        if len(files) > fnmaxn and i == fnmaxn - 1:
            # Out of rows: squash the remaining names onto one line.
            win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
            y = y + 1
            break
        win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
        y = y + 1

    conflicts = rule.conflicts
    if len(conflicts) > 0:
        conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
        conflictstr = b"changed files overlap with %s" % conflictstr
    else:
        conflictstr = b'no overlap'

    win.addstr(y, 1, conflictstr[:length])
    win.noutrefresh()
1318 |
|
1318 | |||
def helplines(self):
    """Return the help text for the current mode as a list of lines."""
    # Patch view and rule view have different key bindings.
    if self.mode[0] == MODE_PATCH:
        text = b"""\
?: help, k/up: line up, j/down: line down, v: stop viewing patch
pgup: prev page, space/pgdn: next page, c: commit, q: abort
"""
    else:
        text = b"""\
?: help, k/up: move up, j/down: move down, space: select, v: view patch
d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
"""
    if self.later_on_top:
        text += b"Newer commits are shown above older commits.\n"
    else:
        text += b"Older commits are shown above newer commits.\n"
    return text.splitlines()
1336 |
|
1336 | |||
def render_help(self, win):
    """Paint the mode-specific help lines into *win*, clipped to its height."""
    maxy, maxx = win.getmaxyx()
    attr = curses.color_pair(COLOR_HELP)
    for row, text in enumerate(self.helplines()):
        if row >= maxy:
            break
        addln(win, row, 0, text, attr)
    win.noutrefresh()
1344 |
|
1344 | |||
def layout(self):
    """Compute (height, width) for the commit, help, and main panes.

    The commit pane is fixed at 12 rows; the help pane takes as many
    rows as the help text; the main pane gets whatever is left.
    Aborts when the terminal is too small to fit all three.
    """
    total_rows, total_cols = self.stdscr.getmaxyx()
    help_rows = len(self.helplines())
    main_rows = total_rows - help_rows - 12
    if main_rows < 1:
        raise error.Abort(
            _(b"terminal dimensions %d by %d too small for curses histedit")
            % (total_rows, total_cols),
            hint=_(
                b"enlarge your terminal or use --config ui.interface=text"
            ),
        )
    return {
        b'commit': (12, total_cols),
        b'help': (help_rows, total_cols),
        b'main': (main_rows, total_cols),
    }
1362 |
|
1362 | |||
def display_pos_to_rule_pos(self, display_pos):
    """Converts a position in display order to rule order.

    The `display_pos` is the order from the top in display order, not
    considering which items are currently visible on the screen. Thus,
    `display_pos=0` is the item at the top (possibly after scrolling to
    the top)
    """
    if not self.later_on_top:
        # Display order matches rule order directly.
        return display_pos
    # Newest-first display: mirror the index across the rule list.
    return len(self.rules) - 1 - display_pos
1375 |
|
1375 | |||
def render_rules(self, rulesscr):
    """Paint the scrolled rule list, highlighting the cursor/selection and
    flagging rules with potential conflicts."""
    top = self.modes[MODE_RULES][b'line_offset']

    # Summary line of all conflicting changesets, drawn above the list.
    conflicts = [r.ctx for r in self.rules if r.conflicts]
    if conflicts:
        line = b"potential conflict in %s" % b','.join(
            map(pycompat.bytestr, conflicts)
        )
        addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))

    for display_pos in range(top, len(self.rules)):
        row = display_pos - top
        if not (0 <= row < self.page_height):
            continue
        rule_pos = self.display_pos_to_rule_pos(display_pos)
        rule = self.rules[rule_pos]
        # Conflict gutter in column 0.  NOTE(review): the non-conflict
        # branch passes curses.COLOR_BLACK (a color number, not an
        # attribute) — preserved as-is from the original.
        if rule.conflicts:
            rulesscr.addstr(row, 0, b" ", curses.color_pair(COLOR_WARN))
        else:
            rulesscr.addstr(row, 0, b" ", curses.COLOR_BLACK)

        if display_pos == self.selected:
            rollcolor = COLOR_ROLL_SELECTED
            addln(rulesscr, row, 2, rule, curses.color_pair(COLOR_SELECTED))
        elif display_pos == self.pos:
            rollcolor = COLOR_ROLL_CURRENT
            addln(
                rulesscr,
                row,
                2,
                rule,
                curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
            )
        else:
            rollcolor = COLOR_ROLL
            addln(rulesscr, row, 2, rule)

        # Re-draw the description of 'roll' rules in the roll colour.
        if rule.action == b'roll':
            rulesscr.addstr(
                row,
                2 + len(rule.prefix),
                rule.desc,
                curses.color_pair(rollcolor),
            )

    rulesscr.noutrefresh()
1422 |
|
1422 | |||
def render_string(self, win, output, diffcolors=False):
    """Write up to (window height - 1) lines of *output* into *win*.

    With diffcolors=True, lines are coloured by their diff marker
    ('+' added, '-' removed, '@@ ' hunk header).
    """
    rows, _cols = win.getmaxyx()
    visible = min(rows - 1, len(output))
    for row in range(visible):
        text = output[row]
        attr = None
        if diffcolors:
            if text.startswith(b'+'):
                attr = curses.color_pair(COLOR_DIFF_ADD_LINE)
            elif text.startswith(b'-'):
                attr = curses.color_pair(COLOR_DIFF_DEL_LINE)
            elif text.startswith(b'@@ '):
                attr = curses.color_pair(COLOR_DIFF_OFFSET)
        if attr is None:
            win.addstr(row, 0, text)
        else:
            win.addstr(row, 0, text, attr)
    win.noutrefresh()
1444 |
|
1444 | |||
def render_patch(self, win):
    """Render the cached patch text from the stored scroll offset down."""
    patch_state = self.modes[MODE_PATCH]
    offset = patch_state[b'line_offset']
    self.render_string(win, patch_state[b'patchcontents'][offset:], diffcolors=True)
1449 |
|
1449 | |||
def event(self, ch):
    """Change state based on the current character input

    This takes the current state and based on the current character input from
    the user we change the state.
    """
    cursor = self.pos

    if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
        return E_RESIZE

    # All digits share one key-table entry (the 'goto' binding).
    key = ch
    if ch is not None and b'0' <= ch <= b'9':
        key = b'0'

    curmode, prevmode = self.mode
    action = KEYTABLE[curmode].get(key, KEYTABLE[b'global'].get(key))
    if action is None:
        return
    if action in (b'down', b'move-down'):
        target = min(cursor + 1, len(self.rules) - 1)
        self.move_cursor(cursor, target)
        # Drag the rule along when a selection is active or this is an
        # explicit move.
        if self.selected is not None or action == b'move-down':
            self.swap(cursor, target)
    elif action in (b'up', b'move-up'):
        target = max(0, cursor - 1)
        self.move_cursor(cursor, target)
        if self.selected is not None or action == b'move-up':
            self.swap(cursor, target)
    elif action == b'next-action':
        self.cycle_action(cursor, next=True)
    elif action == b'prev-action':
        self.cycle_action(cursor, next=False)
    elif action == b'select':
        # Toggle: select the cursor's rule, or clear an existing selection.
        self.selected = cursor if self.selected is None else None
        self.make_selection(self.selected)
    elif action == b'goto' and int(ch) < len(self.rules) <= 10:
        # Digit keys jump straight to the rule with that original position.
        newrule = next(r for r in self.rules if r.origpos == int(ch))
        self.move_cursor(cursor, newrule.pos)
        if self.selected is not None:
            self.swap(cursor, newrule.pos)
    elif action.startswith(b'action-'):
        self.change_action(cursor, action[7:])
    elif action == b'showpatch':
        self.change_mode(MODE_PATCH if curmode != MODE_PATCH else prevmode)
    elif action == b'help':
        self.change_mode(MODE_HELP if curmode != MODE_HELP else prevmode)
    elif action == b'quit':
        return E_QUIT
    elif action == b'histedit':
        return E_HISTEDIT
    elif action == b'page-down':
        return E_PAGEDOWN
    elif action == b'page-up':
        return E_PAGEUP
    elif action == b'line-down':
        return E_LINEDOWN
    elif action == b'line-up':
        return E_LINEUP
1511 |
|
1511 | |||
def patch_contents(self):
    """Return the verbose status+patch text for the rule under the cursor
    as a list of byte lines."""
    repo = self.repo
    rule = self.rules[self.display_pos_to_rule_pos(self.pos)]
    displayer = logcmdutil.changesetdisplayer(
        repo.ui,
        repo,
        {b"patch": True, b"template": b"status"},
        buffered=True,
    )
    # Force verbose so the full commit description is included.
    overrides = {(b'ui', b'verbose'): True}
    with repo.ui.configoverride(overrides, source=b'histedit'):
        displayer.show(rule.ctx)
        displayer.close()
    return displayer.hunk[rule.ctx.rev()].splitlines()
1526 |
|
1526 | |||
def move_cursor(self, oldpos, newpos):
    """Change the rule/changeset that the cursor is pointing to, regardless
    of current mode (you can switch between patches from the patch view)."""
    self.pos = newpos

    mode = self.mode[0]
    if mode == MODE_RULES:
        # Keep the cursor's rule inside the visible window of MODE_RULES,
        # so switching back to the rule list shows it even if we are not
        # currently viewing the rules.
        rules_state = self.modes[MODE_RULES]
        offset = rules_state[b'line_offset']
        if newpos < offset:
            rules_state[b'line_offset'] = newpos
        elif newpos > offset + self.page_height - 1:
            rules_state[b'line_offset'] = newpos - self.page_height + 1

    # Any cursor move resets the patch view to the top of the new patch.
    self.modes[MODE_PATCH][b'line_offset'] = 0
1545 |
|
1545 | |||
def change_mode(self, mode):
    """Switch to *mode*, remembering the mode we came from."""
    previous = self.mode[0]
    self.mode = (mode, previous)
    # Entering the patch view (re)loads the patch for the current rule.
    if mode == MODE_PATCH:
        self.modes[MODE_PATCH][b'patchcontents'] = self.patch_contents()
1551 |
|
1551 | |||
def make_selection(self, pos):
    """Record *pos* as the selected rule (None clears the selection)."""
    self.selected = pos
1554 |
|
1554 | |||
def swap(self, oldpos, newpos):
    """Swap two positions and calculate necessary conflicts in
    O(|newpos-oldpos|) time

    Both positions are given in display order; the corresponding entries
    of self.rules are exchanged, their cached positions updated, and the
    conflict sets of both moved rules refreshed against every rule in the
    affected span.
    """
    old_rule_pos = self.display_pos_to_rule_pos(oldpos)
    new_rule_pos = self.display_pos_to_rule_pos(newpos)

    rules = self.rules
    assert 0 <= old_rule_pos < len(rules) and 0 <= new_rule_pos < len(rules)

    rules[old_rule_pos], rules[new_rule_pos] = (
        rules[new_rule_pos],
        rules[old_rule_pos],
    )

    # TODO: swap should not know about histeditrule's internals
    rules[new_rule_pos].pos = new_rule_pos
    rules[old_rule_pos].pos = old_rule_pos

    start = min(old_rule_pos, new_rule_pos)
    end = max(old_rule_pos, new_rule_pos)
    for r in range(start, end + 1):
        rules[new_rule_pos].checkconflicts(rules[r])
        rules[old_rule_pos].checkconflicts(rules[r])

    # Bugfix: use an explicit None check (matching event()).  A selection
    # at display position 0 is falsy but is still a live selection that
    # must follow the moved rule.
    if self.selected is not None:
        self.make_selection(newpos)
1581 |
|
1581 | |||
def change_action(self, pos, action):
    """Set the action of the rule at *pos* to *action*."""
    assert 0 <= pos < len(self.rules)
    self.rules[pos].action = action
1586 |
|
1586 | |||
def cycle_action(self, pos, next=False):
    """Move the rule at *pos* to the next (or previous) action in
    KEY_LIST, wrapping around at either end."""
    assert 0 <= pos < len(self.rules)
    current = self.rules[pos].action

    assert current in KEY_LIST

    step = 1 if next else -1
    new_index = (KEY_LIST.index(current) + step) % len(KEY_LIST)
    self.change_action(pos, KEY_LIST[new_index])
1601 |
|
1601 | |||
def change_view(self, delta, unit):
    """Change the region of whatever is being viewed (a patch or the list
    of changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or
    'line'.  Only the patch view scrolls here."""
    mode = self.mode[0]
    if mode != MODE_PATCH:
        return
    mode_state = self.modes[mode]
    num_lines = len(mode_state[b'patchcontents'])
    page_height = self.page_height
    step = page_height if unit == b'page' else 1
    num_pages = 1 + (num_lines - 1) // page_height
    max_offset = (num_pages - 1) * page_height
    target = mode_state[b'line_offset'] + delta * step
    # Clamp in this order: an empty patch makes max_offset negative and
    # the outer max(0, ...) must win.
    mode_state[b'line_offset'] = max(0, min(max_offset, target))
1616 |
|
1616 | |||
1617 |
|
1617 | |||
def _chisteditmain(repo, rules, stdscr):
    """Curses main loop for chistedit.

    Returns False when the user quits, or the (possibly reordered) rule
    list when the user asks to run histedit.
    """
    try:
        curses.use_default_colors()
    except curses.error:
        pass

    # initialize color pattern
    curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
    curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
    curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
    curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
    curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
    curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
    curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
    curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
    curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
    curses.init_pair(
        COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
    )
    curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)

    # hide the cursor where the terminal supports it
    try:
        curses.curs_set(0)
    except curses.error:
        pass

    def stackwin(size, y, x):
        # Create a (height, width) window at (y, x); return the y below it
        # so panes can be stacked top-to-bottom.
        win = curses.newwin(size[0], size[1], y, x)
        return win, y + size[0], x

    state = _chistedit_state(repo, rules, stdscr)

    # event loop
    keypress = None
    stdscr.clear()
    stdscr.refresh()
    while True:
        oldmode, _unused = state.mode
        if oldmode == MODE_INIT:
            state.change_mode(MODE_RULES)
        ev = state.event(keypress)

        if ev == E_QUIT:
            return False
        if ev == E_HISTEDIT:
            return state.rules
        else:
            if ev == E_RESIZE:
                size = screen_size()
                if size != stdscr.getmaxyx():
                    curses.resizeterm(*size)

            sizes = state.layout()
            curmode, _unused = state.mode
            if curmode != oldmode:
                state.page_height = sizes[b'main'][0]
                # Adjust the view to fit the current screen size.
                state.move_cursor(state.pos, state.pos)

            # Pack the windows against the top, each pane spread across the
            # full width of the screen.
            y, x = (0, 0)
            helpwin, y, x = stackwin(sizes[b'help'], y, x)
            mainwin, y, x = stackwin(sizes[b'main'], y, x)
            commitwin, y, x = stackwin(sizes[b'commit'], y, x)

            if ev in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
                if ev == E_PAGEDOWN:
                    state.change_view(+1, b'page')
                elif ev == E_PAGEUP:
                    state.change_view(-1, b'page')
                elif ev == E_LINEDOWN:
                    state.change_view(+1, b'line')
                elif ev == E_LINEUP:
                    state.change_view(-1, b'line')

            # start rendering
            commitwin.erase()
            helpwin.erase()
            mainwin.erase()
            if curmode == MODE_PATCH:
                state.render_patch(mainwin)
            elif curmode == MODE_HELP:
                state.render_string(mainwin, __doc__.strip().splitlines())
            else:
                state.render_rules(mainwin)
            state.render_commit(commitwin)
            state.render_help(helpwin)
            curses.doupdate()
            # done rendering
            keypress = encoding.strtolocal(stdscr.getkey())
1711 |
|
1711 | |||
1712 |
|
1712 | |||
def _chistedit(ui, repo, freeargs, opts):
    """interactively edit changeset history via a curses interface

    Provides a ncurses interface to histedit. Press ? in chistedit mode
    to see an extensive help. Requires python-curses to be installed."""

    if curses is None:
        raise error.Abort(_(b"Python curses library required"))

    # disable color
    ui._colormode = None

    try:
        keep = opts.get(b'keep')
        revs = opts.get(b'rev', [])[:]
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)

        revs.extend(freeargs)
        if not revs:
            # Fall back to the configured default destination, if any.
            defaultrev = destutil.desthistedit(ui, repo)
            if defaultrev is not None:
                revs.append(defaultrev)
        if len(revs) != 1:
            raise error.InputError(
                _(b'histedit requires exactly one ancestor revision')
            )

        rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.InputError(
                _(
                    b'The specified revisions must have '
                    b'exactly one common root'
                )
            )
        root = rr[0].node()

        topmost = repo.dirstate.p1()
        revs = between(repo, root, topmost, keep)
        if not revs:
            raise error.InputError(
                _(b'%s is not an ancestor of working directory') % short(root)
            )

        rules = [
            histeditrule(ui, repo[r], pos) for pos, r in enumerate(revs)
        ]
        # curses.wrapper restores the terminal for us on exit.
        with util.with_lc_ctype():
            rc = curses.wrapper(functools.partial(_chisteditmain, repo, rules))
        if rc is False:
            ui.write(_(b"histedit aborted\n"))
            return 0
        if type(rc) is list:
            ui.status(_(b"performing changes\n"))
            rules = makecommands(rc)
            with repo.vfs(b'chistedit', b'w+') as fp:
                for r in rules:
                    fp.write(r)
                opts[b'commands'] = fp.name
                return _texthistedit(ui, repo, freeargs, opts)
    except KeyboardInterrupt:
        pass
    return -1
1779 |
|
1777 | |||
1780 |
|
1778 | |||
@command(
    b'histedit',
    [
        (
            b'',
            b'commands',
            b'',
            _(b'read history edits from the specified file'),
            _(b'FILE'),
        ),
        (b'c', b'continue', False, _(b'continue an edit already in progress')),
        (b'', b'edit-plan', False, _(b'edit remaining actions list')),
        (
            b'k',
            b'keep',
            False,
            _(b"don't strip old nodes after edit is complete"),
        ),
        (b'', b'abort', False, _(b'abort an edit in progress')),
        (b'o', b'outgoing', False, _(b'changesets not found in destination')),
        (
            b'f',
            b'force',
            False,
            _(b'force outgoing even for unrelated repositories'),
        ),
        (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
    ]
    + cmdutil.formatteropts,
    _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command lets you edit a linear series of changesets (up to
    and including the working directory, which should be clean).
    You can:

    - `pick` to [re]order a changeset

    - `drop` to omit changeset

    - `mess` to reword the changeset commit message

    - `fold` to combine it with the preceding changeset (using the later date)

    - `roll` like fold, but discarding this commit's description and date

    - `edit` to edit this changeset (preserving date)

    - `base` to checkout changeset and apply further changesets from there

    There are a number of ways to select the root changeset:

    - Specify ANCESTOR directly

    - Use --outgoing -- it will be the first linear changeset not
      included in destination. (See :hg:`help config.paths.default-push`)

    - Otherwise, the value from the "histedit.defaultrev" config option
      is used as a revset to select the base revision when ANCESTOR is not
      specified. The first revision returned by the revset is used. By
      default, this selects the editable history that is unique to the
      ancestry of the working directory.

    .. container:: verbose

       If you use --outgoing, this command will abort if there are ambiguous
       outgoing revisions. For example, if there are multiple branches
       containing outgoing revisions.

       Use "min(outgoing() and ::.)" or similar revset specification
       instead of --outgoing to specify edit target revision exactly in
       such ambiguous situation. See :hg:`help revsets` for detail about
       selecting revisions.

    .. container:: verbose

       Examples:

         - A number of changes have been made.
           Revision 3 is no longer needed.

           Start history editing from revision 3::

             hg histedit -r 3

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

           Additional information about the possible actions
           to take appears below the list of revisions.

           To remove revision 3 from the history,
           its action (at the beginning of the relevant line)
           is changed to 'drop'::

             drop 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

         - A number of changes have been made.
           Revision 2 and 4 need to be swapped.

           Start history editing from revision 2::

             hg histedit -r 2

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 252a1af424ad 2 Blorb a morgwazzle
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog

           To swap revision 2 and 4, its lines are swapped
           in the editor::

             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 252a1af424ad 2 Blorb a morgwazzle

    Returns 0 on success, 1 if user intervention is required (not only
    for intentional "edit" command, but also for resolving unexpected
    conflicts).
    """
    # Normalize **opts (str keys from the command dispatcher) back to the
    # bytes keys used throughout this extension.
    opts = pycompat.byteskwargs(opts)

    # kludge: _chistedit only works for starting an edit, not aborting
    # or continuing, so fall back to regular _texthistedit for those
    # operations.
    if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
        return _chistedit(ui, repo, freeargs, opts)
    return _texthistedit(ui, repo, freeargs, opts)
1920 |
|
1918 | |||
1921 |
|
1919 | |||
def _texthistedit(ui, repo, freeargs, opts):
    """Run a text-mode histedit session while holding both repo locks."""
    state = histeditstate(repo)
    with repo.wlock() as wl:
        with repo.lock() as l:
            # Record the held locks on the state object; _histedit and the
            # actions it runs read them from there.
            state.wlock = wl
            state.lock = l
            _histedit(ui, repo, state, freeargs, opts)
1928 |
|
1926 | |||
1929 |
|
1927 | |||
# Symbolic names for what the user asked histedit to do.
goalcontinue = b'continue'
goalabort = b'abort'
goaleditplan = b'edit-plan'
goalnew = b'new'


def _getgoal(opts):
    """Map parsed command-line options to one of the goal constants.

    Checked in priority order: --continue, then --abort, then
    --edit-plan; anything else means a brand-new edit.
    """
    for key, goal in (
        (b'continue', goalcontinue),
        (b'abort', goalabort),
        (b'edit_plan', goaleditplan),
    ):
        if opts.get(key):
            return goal
    return goalnew
1944 |
|
1942 | |||
1945 |
|
1943 | |||
def _readfile(ui, path):
    """Return the raw bytes of *path*, or of stdin when path is ``b'-'``.

    Reading from stdin is wrapped in a timeblockedsection so time spent
    waiting on the user (or a pipe) is accounted as blocked time.
    """
    if path == b'-':
        with ui.timeblockedsection(b'histedit'):
            return ui.fin.read()
    else:
        # open() requires a str mode on Python 3; the former bytes
        # literal b'rb' raises TypeError there.
        with open(path, 'rb') as f:
            return f.read()
1953 |
|
1951 | |||
1954 |
|
1952 | |||
def _validateargs(ui, repo, freeargs, opts, goal, rules, revs):
    """Reject invalid option/argument combinations for the given goal.

    Raises StateError if mq patches are applied, and InputError for any
    incompatible flag combination. NOTE: for a new edit without
    --outgoing, this mutates ``revs`` in place (extends it with
    ``freeargs`` and, if still empty, the configured default revision).
    """
    # TODO only abort if we try to histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise error.StateError(_(b'source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get(b'outgoing')
    editplan = opts.get(b'edit_plan')
    abort = opts.get(b'abort')
    force = opts.get(b'force')
    if force and not outg:
        raise error.InputError(_(b'--force only allowed with --outgoing'))
    if goal == b'continue':
        if any((outg, abort, revs, freeargs, rules, editplan)):
            raise error.InputError(_(b'no arguments allowed with --continue'))
    elif goal == b'abort':
        if any((outg, revs, freeargs, rules, editplan)):
            raise error.InputError(_(b'no arguments allowed with --abort'))
    elif goal == b'edit-plan':
        # --commands (rules) is the only extra input accepted here.
        if any((outg, revs, freeargs)):
            raise error.InputError(
                _(b'only --commands argument allowed with --edit-plan')
            )
    else:
        # goal is 'new': the root is taken either from --outgoing or from
        # explicit/default revisions.
        if outg:
            if revs:
                raise error.InputError(
                    _(b'no revisions allowed with --outgoing')
                )
            if len(freeargs) > 1:
                raise error.InputError(
                    _(b'only one repo argument allowed with --outgoing')
                )
        else:
            revs.extend(freeargs)
            if len(revs) == 0:
                # fall back to the histedit.defaultrev revset
                defaultrev = destutil.desthistedit(ui, repo)
                if defaultrev is not None:
                    revs.append(defaultrev)

            if len(revs) != 1:
                raise error.InputError(
                    _(b'histedit requires exactly one ancestor revision')
                )
2001 |
|
1999 | |||
2002 |
|
2000 | |||
def _histedit(ui, repo, state, freeargs, opts):
    """Dispatch one histedit invocation (locks already held by caller).

    Validates arguments, warns about tagged changesets, then branches on
    the goal: continue bootstraps the saved session, edit-plan and abort
    return early after their sub-command, and new starts a fresh edit.
    Except for the early returns, the action loop and final cleanup run
    afterwards.
    """
    fm = ui.formatter(b'histedit', opts)
    fm.startitem()
    goal = _getgoal(opts)
    revs = opts.get(b'rev', [])
    # backup-bundle=False means strip without leaving a backup bundle
    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
    rules = opts.get(b'commands', b'')
    state.keep = opts.get(b'keep', False)

    # May extend ``revs`` in place for the goal-new case.
    _validateargs(ui, repo, freeargs, opts, goal, rules, revs)

    hastags = False
    if revs:
        revs = logcmdutil.revrange(repo, revs)
        ctxs = [repo[rev] for rev in revs]
        for ctx in ctxs:
            tags = [tag for tag in ctx.tags() if tag != b'tip']
            # hastags ends up truthy if any selected changeset carries a
            # non-tip tag (it holds the first non-zero tag count seen)
            if not hastags:
                hastags = len(tags)
    if hastags:
        # editing a tagged changeset loses the tag; make the user confirm
        if ui.promptchoice(
            _(
                b'warning: tags associated with the given'
                b' changeset will be lost after histedit.\n'
                b'do you want to continue (yN)? $$ &Yes $$ &No'
            ),
            default=1,
        ):
            raise error.CanceledError(_(b'histedit cancelled\n'))
    # rebuild state
    if goal == goalcontinue:
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == goaleditplan:
        _edithisteditplan(ui, repo, state, rules)
        return
    elif goal == goalabort:
        _aborthistedit(ui, repo, state, nobackup=nobackup)
        return
    else:
        # goal == goalnew
        _newhistedit(ui, repo, state, revs, freeargs, opts)

    _continuehistedit(ui, repo, state)
    _finishhistedit(ui, repo, state, fm)
    fm.end()
2049 |
|
2047 | |||
2050 |
|
2048 | |||
def _continuehistedit(ui, repo, state):
    """This function runs after either:
    - bootstrapcontinue (if the goal is 'continue')
    - _newhistedit (if the goal is 'new')

    Drives the main loop: executes each pending action in order,
    persisting the state file after every step so the session can be
    resumed or aborted at any point.
    """
    # preprocess rules so that we can hide inner folds from the user
    # and only show one editor
    actions = state.actions[:]
    # Pair each action with its successor (trailing None marks the end);
    # a fold directly followed by another fold becomes a _multifold.
    for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
        if action.verb == b'fold' and nextact and nextact.verb == b'fold':
            state.actions[idx].__class__ = _multifold

    # Force an initial state file write, so the user can run --abort/continue
    # even if there's an exception before the first transaction serialize.
    state.write()

    tr = None
    # Don't use singletransaction by default since it rolls the entire
    # transaction back if an unexpected exception happens (like a
    # pretxncommit hook throws, or the user aborts the commit msg editor).
    if ui.configbool(b"histedit", b"singletransaction"):
        # Don't use a 'with' for the transaction, since actions may close
        # and reopen a transaction. For example, if the action executes an
        # external process it may choose to commit the transaction first.
        tr = repo.transaction(b'histedit')
    progress = ui.makeprogress(
        _(b"editing"), unit=_(b'changes'), total=len(state.actions)
    )
    with progress, util.acceptintervention(tr):
        while state.actions:
            # Persist before running so a crash mid-action leaves a
            # resumable state file.
            state.write(tr=tr)
            actobj = state.actions[0]
            progress.increment(item=actobj.torule())
            ui.debug(
                b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
            )
            parentctx, replacement_ = actobj.run()
            state.parentctxnode = parentctx.node()
            state.replacements.extend(replacement_)
            state.actions.pop(0)

    state.write()
2093 |
|
2091 | |||
2094 |
|
2092 | |||
def _finishhistedit(ui, repo, state, fm):
    """This action runs when histedit is finishing its session

    Updates the working copy to the final parent, computes the
    old-node -> new-nodes replacement mapping, strips/obsoletes
    superseded and temporary nodes, emits the node changes through the
    formatter, and clears the session state.
    """
    mergemod.update(repo[state.parentctxnode])

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in mapping.items():
            if not succs:
                # empty successor list: the changeset was dropped outright
                ui.debug(b'histedit: %s is dropped\n' % short(prec))
            else:
                ui.debug(
                    b'histedit: %s is replaced by %s\n'
                    % (short(prec), short(succs[0]))
                )
                if len(succs) > 1:
                    m = b'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % short(n))

    if not state.keep:
        if mapping:
            movetopmostbookmarks(repo, state.topmost, ntm)
            # TODO update mq state
    else:
        # --keep: old nodes stay; forget the replacement mapping so only
        # temporary nodes get cleaned up below
        mapping = {}

    # mapping a node to an empty tuple marks it for removal without a
    # successor (these are the intermediate nodes of the edit)
    for n in tmpnodes:
        if n in repo:
            mapping[n] = ()

    # remove entries about unknown nodes
    has_node = repo.unfiltered().changelog.index.has_node
    mapping = {
        k: v
        for k, v in mapping.items()
        if has_node(k) and all(has_node(n) for n in v)
    }
    scmutil.cleanupnodes(repo, mapping, b'histedit')
    hf = fm.hexfunc
    fl = fm.formatlist
    fd = fm.formatdict
    nodechanges = fd(
        {
            hf(oldn): fl([hf(n) for n in newn], name=b'node')
            for oldn, newn in mapping.items()
        },
        key=b"oldnode",
        value=b"newnodes",
    )
    fm.data(nodechanges=nodechanges)

    state.clear()
    if os.path.exists(repo.sjoin(b'undo')):
        os.unlink(repo.sjoin(b'undo'))
    if repo.vfs.exists(b'histedit-last-edit.txt'):
        repo.vfs.unlink(b'histedit-last-edit.txt')
2151 |
|
2149 | |||
2152 |
|
2150 | |||
def _aborthistedit(ui, repo, state, nobackup=False):
    """Abort an in-progress histedit and restore the previous state.

    Re-applies the backup bundle if the original topmost changeset was
    stripped, moves the working copy back to the old parent when it sits
    on an edit-created node, and strips all leaf/temporary nodes. The
    state file is cleared even on failure.
    """
    try:
        state.read()
        __, leafs, tmpnodes, __ = processreplacement(state)
        ui.debug(b'restore wc to old parent %s\n' % short(state.topmost))

        # Recover our old commits if necessary
        if not state.topmost in repo and state.backupfile:
            backupfile = repo.vfs.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            with repo.transaction(b'histedit.abort') as tr:
                bundle2.applybundle(
                    repo,
                    gen,
                    tr,
                    source=b'histedit',
                    url=b'bundle:' + backupfile,
                )

            os.remove(backupfile)

        # check whether we should update away
        if repo.unfiltered().revs(
            b'parents() and (%n or %ln::)',
            state.parentctxnode,
            leafs | tmpnodes,
        ):
            hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
        cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
        cleanupnode(ui, repo, leafs, nobackup=nobackup)
    except Exception:
        # best-effort warning; re-raise so the original failure surfaces
        if state.inprogress():
            ui.warn(
                _(
                    b'warning: encountered an exception during histedit '
                    b'--abort; the repository may not have been completely '
                    b'cleaned up\n'
                )
            )
        raise
    finally:
        state.clear()
2196 |
|
2194 | |||
2197 |
|
2195 | |||
def hgaborthistedit(ui, repo):
    """Entry point for aborting a histedit; acquires both repo locks."""
    state = histeditstate(repo)
    keep_backup = ui.configbool(b'rewrite', b'backup-bundle')
    with repo.wlock() as wlock:
        with repo.lock() as lock:
            state.wlock = wlock
            state.lock = lock
            _aborthistedit(ui, repo, state, nobackup=not keep_backup)
2205 |
|
2203 | |||
2206 |
|
2204 | |||
def _edithisteditplan(ui, repo, state, rules):
    """Replace the remaining action list of an in-progress histedit."""
    state.read()
    if rules:
        # A plan was supplied via --commands (a file path or '-').
        plan = _readfile(ui, rules)
    else:
        # No --commands: open the interactive rule editor on the
        # remaining actions.
        header = geteditcomment(
            ui, short(state.parentctxnode), short(state.topmost)
        )
        plan = ruleeditor(repo, ui, state.actions, header)
    newactions = parserules(plan, state)
    oldctxs = [repo[act.node] for act in state.actions if act.node]
    warnverifyactions(ui, repo, newactions, state, oldctxs)
    state.actions = newactions
    state.write()
2221 |
|
2219 | |||
2222 |
|
2220 | |||
2223 | def _newhistedit(ui, repo, state, revs, freeargs, opts): |
|
2221 | def _newhistedit(ui, repo, state, revs, freeargs, opts): | |
2224 | outg = opts.get(b'outgoing') |
|
2222 | outg = opts.get(b'outgoing') | |
2225 | rules = opts.get(b'commands', b'') |
|
2223 | rules = opts.get(b'commands', b'') | |
2226 | force = opts.get(b'force') |
|
2224 | force = opts.get(b'force') | |
2227 |
|
2225 | |||
2228 | cmdutil.checkunfinished(repo) |
|
2226 | cmdutil.checkunfinished(repo) | |
2229 | cmdutil.bailifchanged(repo) |
|
2227 | cmdutil.bailifchanged(repo) | |
2230 |
|
2228 | |||
2231 | topmost = repo.dirstate.p1() |
|
2229 | topmost = repo.dirstate.p1() | |
2232 | if outg: |
|
2230 | if outg: | |
2233 | if freeargs: |
|
2231 | if freeargs: | |
2234 | remote = freeargs[0] |
|
2232 | remote = freeargs[0] | |
2235 | else: |
|
2233 | else: | |
2236 | remote = None |
|
2234 | remote = None | |
2237 | root = findoutgoing(ui, repo, remote, force, opts) |
|
2235 | root = findoutgoing(ui, repo, remote, force, opts) | |
2238 | else: |
|
2236 | else: | |
2239 | rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs))) |
|
2237 | rr = list(repo.set(b'roots(%ld)', logcmdutil.revrange(repo, revs))) | |
2240 | if len(rr) != 1: |
|
2238 | if len(rr) != 1: | |
2241 | raise error.InputError( |
|
2239 | raise error.InputError( | |
2242 | _( |
|
2240 | _( | |
2243 | b'The specified revisions must have ' |
|
2241 | b'The specified revisions must have ' | |
2244 | b'exactly one common root' |
|
2242 | b'exactly one common root' | |
2245 | ) |
|
2243 | ) | |
2246 | ) |
|
2244 | ) | |
2247 | root = rr[0].node() |
|
2245 | root = rr[0].node() | |
2248 |
|
2246 | |||
2249 | revs = between(repo, root, topmost, state.keep) |
|
2247 | revs = between(repo, root, topmost, state.keep) | |
2250 | if not revs: |
|
2248 | if not revs: | |
2251 | raise error.InputError( |
|
2249 | raise error.InputError( | |
2252 | _(b'%s is not an ancestor of working directory') % short(root) |
|
2250 | _(b'%s is not an ancestor of working directory') % short(root) | |
2253 | ) |
|
2251 | ) | |
2254 |
|
2252 | |||
2255 | ctxs = [repo[r] for r in revs] |
|
2253 | ctxs = [repo[r] for r in revs] | |
2256 |
|
2254 | |||
2257 | wctx = repo[None] |
|
2255 | wctx = repo[None] | |
2258 | # Please don't ask me why `ancestors` is this value. I figured it |
|
2256 | # Please don't ask me why `ancestors` is this value. I figured it | |
2259 | # out with print-debugging, not by actually understanding what the |
|
2257 | # out with print-debugging, not by actually understanding what the | |
2260 | # merge code is doing. :( |
|
2258 | # merge code is doing. :( | |
2261 | ancs = [repo[b'.']] |
|
2259 | ancs = [repo[b'.']] | |
2262 | # Sniff-test to make sure we won't collide with untracked files in |
|
2260 | # Sniff-test to make sure we won't collide with untracked files in | |
2263 | # the working directory. If we don't do this, we can get a |
|
2261 | # the working directory. If we don't do this, we can get a | |
2264 | # collision after we've started histedit and backing out gets ugly |
|
2262 | # collision after we've started histedit and backing out gets ugly | |
2265 | # for everyone, especially the user. |
|
2263 | # for everyone, especially the user. | |
2266 | for c in [ctxs[0].p1()] + ctxs: |
|
2264 | for c in [ctxs[0].p1()] + ctxs: | |
2267 | try: |
|
2265 | try: | |
2268 | mergemod.calculateupdates( |
|
2266 | mergemod.calculateupdates( | |
2269 | repo, |
|
2267 | repo, | |
2270 | wctx, |
|
2268 | wctx, | |
2271 | c, |
|
2269 | c, | |
2272 | ancs, |
|
2270 | ancs, | |
2273 | # These parameters were determined by print-debugging |
|
2271 | # These parameters were determined by print-debugging | |
2274 | # what happens later on inside histedit. |
|
2272 | # what happens later on inside histedit. | |
2275 | branchmerge=False, |
|
2273 | branchmerge=False, | |
2276 | force=False, |
|
2274 | force=False, | |
2277 | acceptremote=False, |
|
2275 | acceptremote=False, | |
2278 | followcopies=False, |
|
2276 | followcopies=False, | |
2279 | ) |
|
2277 | ) | |
2280 | except error.Abort: |
|
2278 | except error.Abort: | |
2281 | raise error.StateError( |
|
2279 | raise error.StateError( | |
2282 | _( |
|
2280 | _( | |
2283 | b"untracked files in working directory conflict with files in %s" |
|
2281 | b"untracked files in working directory conflict with files in %s" | |
2284 | ) |
|
2282 | ) | |
2285 | % c |
|
2283 | % c | |
2286 | ) |
|
2284 | ) | |
2287 |
|
2285 | |||
2288 | if not rules: |
|
2286 | if not rules: | |
2289 | comment = geteditcomment(ui, short(root), short(topmost)) |
|
2287 | comment = geteditcomment(ui, short(root), short(topmost)) | |
2290 | actions = [pick(state, r) for r in revs] |
|
2288 | actions = [pick(state, r) for r in revs] | |
2291 | rules = ruleeditor(repo, ui, actions, comment) |
|
2289 | rules = ruleeditor(repo, ui, actions, comment) | |
2292 | else: |
|
2290 | else: | |
2293 | rules = _readfile(ui, rules) |
|
2291 | rules = _readfile(ui, rules) | |
2294 | actions = parserules(rules, state) |
|
2292 | actions = parserules(rules, state) | |
2295 | warnverifyactions(ui, repo, actions, state, ctxs) |
|
2293 | warnverifyactions(ui, repo, actions, state, ctxs) | |
2296 |
|
2294 | |||
2297 | parentctxnode = repo[root].p1().node() |
|
2295 | parentctxnode = repo[root].p1().node() | |
2298 |
|
2296 | |||
2299 | state.parentctxnode = parentctxnode |
|
2297 | state.parentctxnode = parentctxnode | |
2300 | state.actions = actions |
|
2298 | state.actions = actions | |
2301 | state.topmost = topmost |
|
2299 | state.topmost = topmost | |
2302 | state.replacements = [] |
|
2300 | state.replacements = [] | |
2303 |
|
2301 | |||
2304 | ui.log( |
|
2302 | ui.log( | |
2305 | b"histedit", |
|
2303 | b"histedit", | |
2306 | b"%d actions to histedit\n", |
|
2304 | b"%d actions to histedit\n", | |
2307 | len(actions), |
|
2305 | len(actions), | |
2308 | histedit_num_actions=len(actions), |
|
2306 | histedit_num_actions=len(actions), | |
2309 | ) |
|
2307 | ) | |
2310 |
|
2308 | |||
2311 | # Create a backup so we can always abort completely. |
|
2309 | # Create a backup so we can always abort completely. | |
2312 | backupfile = None |
|
2310 | backupfile = None | |
2313 | if not obsolete.isenabled(repo, obsolete.createmarkersopt): |
|
2311 | if not obsolete.isenabled(repo, obsolete.createmarkersopt): | |
2314 | backupfile = repair.backupbundle( |
|
2312 | backupfile = repair.backupbundle( | |
2315 | repo, [parentctxnode], [topmost], root, b'histedit' |
|
2313 | repo, [parentctxnode], [topmost], root, b'histedit' | |
2316 | ) |
|
2314 | ) | |
2317 | state.backupfile = backupfile |
|
2315 | state.backupfile = backupfile | |
2318 |
|
2316 | |||
2319 |
|
2317 | |||
2320 | def _getsummary(ctx): |
|
2318 | def _getsummary(ctx): | |
2321 | return stringutil.firstline(ctx.description()) |
|
2319 | return stringutil.firstline(ctx.description()) | |
2322 |
|
2320 | |||
2323 |
|
2321 | |||
2324 | def bootstrapcontinue(ui, state, opts): |
|
2322 | def bootstrapcontinue(ui, state, opts): | |
2325 | repo = state.repo |
|
2323 | repo = state.repo | |
2326 |
|
2324 | |||
2327 | ms = mergestatemod.mergestate.read(repo) |
|
2325 | ms = mergestatemod.mergestate.read(repo) | |
2328 | mergeutil.checkunresolved(ms) |
|
2326 | mergeutil.checkunresolved(ms) | |
2329 |
|
2327 | |||
2330 | if state.actions: |
|
2328 | if state.actions: | |
2331 | actobj = state.actions.pop(0) |
|
2329 | actobj = state.actions.pop(0) | |
2332 |
|
2330 | |||
2333 | if _isdirtywc(repo): |
|
2331 | if _isdirtywc(repo): | |
2334 | actobj.continuedirty() |
|
2332 | actobj.continuedirty() | |
2335 | if _isdirtywc(repo): |
|
2333 | if _isdirtywc(repo): | |
2336 | abortdirty() |
|
2334 | abortdirty() | |
2337 |
|
2335 | |||
2338 | parentctx, replacements = actobj.continueclean() |
|
2336 | parentctx, replacements = actobj.continueclean() | |
2339 |
|
2337 | |||
2340 | state.parentctxnode = parentctx.node() |
|
2338 | state.parentctxnode = parentctx.node() | |
2341 | state.replacements.extend(replacements) |
|
2339 | state.replacements.extend(replacements) | |
2342 |
|
2340 | |||
2343 | return state |
|
2341 | return state | |
2344 |
|
2342 | |||
2345 |
|
2343 | |||
2346 | def between(repo, old, new, keep): |
|
2344 | def between(repo, old, new, keep): | |
2347 | """select and validate the set of revision to edit |
|
2345 | """select and validate the set of revision to edit | |
2348 |
|
2346 | |||
2349 | When keep is false, the specified set can't have children.""" |
|
2347 | When keep is false, the specified set can't have children.""" | |
2350 | revs = repo.revs(b'%n::%n', old, new) |
|
2348 | revs = repo.revs(b'%n::%n', old, new) | |
2351 | if revs and not keep: |
|
2349 | if revs and not keep: | |
2352 | rewriteutil.precheck(repo, revs, b'edit') |
|
2350 | rewriteutil.precheck(repo, revs, b'edit') | |
2353 | if repo.revs(b'(%ld) and merge()', revs): |
|
2351 | if repo.revs(b'(%ld) and merge()', revs): | |
2354 | raise error.StateError( |
|
2352 | raise error.StateError( | |
2355 | _(b'cannot edit history that contains merges') |
|
2353 | _(b'cannot edit history that contains merges') | |
2356 | ) |
|
2354 | ) | |
2357 | return pycompat.maplist(repo.changelog.node, revs) |
|
2355 | return pycompat.maplist(repo.changelog.node, revs) | |
2358 |
|
2356 | |||
2359 |
|
2357 | |||
2360 | def ruleeditor(repo, ui, actions, editcomment=b""): |
|
2358 | def ruleeditor(repo, ui, actions, editcomment=b""): | |
2361 | """open an editor to edit rules |
|
2359 | """open an editor to edit rules | |
2362 |
|
2360 | |||
2363 | rules are in the format [ [act, ctx], ...] like in state.rules |
|
2361 | rules are in the format [ [act, ctx], ...] like in state.rules | |
2364 | """ |
|
2362 | """ | |
2365 | if repo.ui.configbool(b"experimental", b"histedit.autoverb"): |
|
2363 | if repo.ui.configbool(b"experimental", b"histedit.autoverb"): | |
2366 | newact = util.sortdict() |
|
2364 | newact = util.sortdict() | |
2367 | for act in actions: |
|
2365 | for act in actions: | |
2368 | ctx = repo[act.node] |
|
2366 | ctx = repo[act.node] | |
2369 | summary = _getsummary(ctx) |
|
2367 | summary = _getsummary(ctx) | |
2370 | fword = summary.split(b' ', 1)[0].lower() |
|
2368 | fword = summary.split(b' ', 1)[0].lower() | |
2371 | added = False |
|
2369 | added = False | |
2372 |
|
2370 | |||
2373 | # if it doesn't end with the special character '!' just skip this |
|
2371 | # if it doesn't end with the special character '!' just skip this | |
2374 | if fword.endswith(b'!'): |
|
2372 | if fword.endswith(b'!'): | |
2375 | fword = fword[:-1] |
|
2373 | fword = fword[:-1] | |
2376 | if fword in primaryactions | secondaryactions | tertiaryactions: |
|
2374 | if fword in primaryactions | secondaryactions | tertiaryactions: | |
2377 | act.verb = fword |
|
2375 | act.verb = fword | |
2378 | # get the target summary |
|
2376 | # get the target summary | |
2379 | tsum = summary[len(fword) + 1 :].lstrip() |
|
2377 | tsum = summary[len(fword) + 1 :].lstrip() | |
2380 | # safe but slow: reverse iterate over the actions so we |
|
2378 | # safe but slow: reverse iterate over the actions so we | |
2381 | # don't clash on two commits having the same summary |
|
2379 | # don't clash on two commits having the same summary | |
2382 | for na, l in reversed(list(newact.items())): |
|
2380 | for na, l in reversed(list(newact.items())): | |
2383 | actx = repo[na.node] |
|
2381 | actx = repo[na.node] | |
2384 | asum = _getsummary(actx) |
|
2382 | asum = _getsummary(actx) | |
2385 | if asum == tsum: |
|
2383 | if asum == tsum: | |
2386 | added = True |
|
2384 | added = True | |
2387 | l.append(act) |
|
2385 | l.append(act) | |
2388 | break |
|
2386 | break | |
2389 |
|
2387 | |||
2390 | if not added: |
|
2388 | if not added: | |
2391 | newact[act] = [] |
|
2389 | newact[act] = [] | |
2392 |
|
2390 | |||
2393 | # copy over and flatten the new list |
|
2391 | # copy over and flatten the new list | |
2394 | actions = [] |
|
2392 | actions = [] | |
2395 | for na, l in newact.items(): |
|
2393 | for na, l in newact.items(): | |
2396 | actions.append(na) |
|
2394 | actions.append(na) | |
2397 | actions += l |
|
2395 | actions += l | |
2398 |
|
2396 | |||
2399 | rules = b'\n'.join([act.torule() for act in actions]) |
|
2397 | rules = b'\n'.join([act.torule() for act in actions]) | |
2400 | rules += b'\n\n' |
|
2398 | rules += b'\n\n' | |
2401 | rules += editcomment |
|
2399 | rules += editcomment | |
2402 | rules = ui.edit( |
|
2400 | rules = ui.edit( | |
2403 | rules, |
|
2401 | rules, | |
2404 | ui.username(), |
|
2402 | ui.username(), | |
2405 | {b'prefix': b'histedit'}, |
|
2403 | {b'prefix': b'histedit'}, | |
2406 | repopath=repo.path, |
|
2404 | repopath=repo.path, | |
2407 | action=b'histedit', |
|
2405 | action=b'histedit', | |
2408 | ) |
|
2406 | ) | |
2409 |
|
2407 | |||
2410 | # Save edit rules in .hg/histedit-last-edit.txt in case |
|
2408 | # Save edit rules in .hg/histedit-last-edit.txt in case | |
2411 | # the user needs to ask for help after something |
|
2409 | # the user needs to ask for help after something | |
2412 | # surprising happens. |
|
2410 | # surprising happens. | |
2413 | with repo.vfs(b'histedit-last-edit.txt', b'wb') as f: |
|
2411 | with repo.vfs(b'histedit-last-edit.txt', b'wb') as f: | |
2414 | f.write(rules) |
|
2412 | f.write(rules) | |
2415 |
|
2413 | |||
2416 | return rules |
|
2414 | return rules | |
2417 |
|
2415 | |||
2418 |
|
2416 | |||
2419 | def parserules(rules, state): |
|
2417 | def parserules(rules, state): | |
2420 | """Read the histedit rules string and return list of action objects""" |
|
2418 | """Read the histedit rules string and return list of action objects""" | |
2421 | rules = [ |
|
2419 | rules = [ | |
2422 | l |
|
2420 | l | |
2423 | for l in (r.strip() for r in rules.splitlines()) |
|
2421 | for l in (r.strip() for r in rules.splitlines()) | |
2424 | if l and not l.startswith(b'#') |
|
2422 | if l and not l.startswith(b'#') | |
2425 | ] |
|
2423 | ] | |
2426 | actions = [] |
|
2424 | actions = [] | |
2427 | for r in rules: |
|
2425 | for r in rules: | |
2428 | if b' ' not in r: |
|
2426 | if b' ' not in r: | |
2429 | raise error.ParseError(_(b'malformed line "%s"') % r) |
|
2427 | raise error.ParseError(_(b'malformed line "%s"') % r) | |
2430 | verb, rest = r.split(b' ', 1) |
|
2428 | verb, rest = r.split(b' ', 1) | |
2431 |
|
2429 | |||
2432 | if verb not in actiontable: |
|
2430 | if verb not in actiontable: | |
2433 | raise error.ParseError(_(b'unknown action "%s"') % verb) |
|
2431 | raise error.ParseError(_(b'unknown action "%s"') % verb) | |
2434 |
|
2432 | |||
2435 | action = actiontable[verb].fromrule(state, rest) |
|
2433 | action = actiontable[verb].fromrule(state, rest) | |
2436 | actions.append(action) |
|
2434 | actions.append(action) | |
2437 | return actions |
|
2435 | return actions | |
2438 |
|
2436 | |||
2439 |
|
2437 | |||
2440 | def warnverifyactions(ui, repo, actions, state, ctxs): |
|
2438 | def warnverifyactions(ui, repo, actions, state, ctxs): | |
2441 | try: |
|
2439 | try: | |
2442 | verifyactions(actions, state, ctxs) |
|
2440 | verifyactions(actions, state, ctxs) | |
2443 | except error.ParseError: |
|
2441 | except error.ParseError: | |
2444 | if repo.vfs.exists(b'histedit-last-edit.txt'): |
|
2442 | if repo.vfs.exists(b'histedit-last-edit.txt'): | |
2445 | ui.warn( |
|
2443 | ui.warn( | |
2446 | _( |
|
2444 | _( | |
2447 | b'warning: histedit rules saved ' |
|
2445 | b'warning: histedit rules saved ' | |
2448 | b'to: .hg/histedit-last-edit.txt\n' |
|
2446 | b'to: .hg/histedit-last-edit.txt\n' | |
2449 | ) |
|
2447 | ) | |
2450 | ) |
|
2448 | ) | |
2451 | raise |
|
2449 | raise | |
2452 |
|
2450 | |||
2453 |
|
2451 | |||
2454 | def verifyactions(actions, state, ctxs): |
|
2452 | def verifyactions(actions, state, ctxs): | |
2455 | """Verify that there exists exactly one action per given changeset and |
|
2453 | """Verify that there exists exactly one action per given changeset and | |
2456 | other constraints. |
|
2454 | other constraints. | |
2457 |
|
2455 | |||
2458 | Will abort if there are to many or too few rules, a malformed rule, |
|
2456 | Will abort if there are to many or too few rules, a malformed rule, | |
2459 | or a rule on a changeset outside of the user-given range. |
|
2457 | or a rule on a changeset outside of the user-given range. | |
2460 | """ |
|
2458 | """ | |
2461 | expected = {c.node() for c in ctxs} |
|
2459 | expected = {c.node() for c in ctxs} | |
2462 | seen = set() |
|
2460 | seen = set() | |
2463 | prev = None |
|
2461 | prev = None | |
2464 |
|
2462 | |||
2465 | if actions and actions[0].verb in [b'roll', b'fold']: |
|
2463 | if actions and actions[0].verb in [b'roll', b'fold']: | |
2466 | raise error.ParseError( |
|
2464 | raise error.ParseError( | |
2467 | _(b'first changeset cannot use verb "%s"') % actions[0].verb |
|
2465 | _(b'first changeset cannot use verb "%s"') % actions[0].verb | |
2468 | ) |
|
2466 | ) | |
2469 |
|
2467 | |||
2470 | for action in actions: |
|
2468 | for action in actions: | |
2471 | action.verify(prev, expected, seen) |
|
2469 | action.verify(prev, expected, seen) | |
2472 | prev = action |
|
2470 | prev = action | |
2473 | if action.node is not None: |
|
2471 | if action.node is not None: | |
2474 | seen.add(action.node) |
|
2472 | seen.add(action.node) | |
2475 | missing = sorted(expected - seen) # sort to stabilize output |
|
2473 | missing = sorted(expected - seen) # sort to stabilize output | |
2476 |
|
2474 | |||
2477 | if state.repo.ui.configbool(b'histedit', b'dropmissing'): |
|
2475 | if state.repo.ui.configbool(b'histedit', b'dropmissing'): | |
2478 | if len(actions) == 0: |
|
2476 | if len(actions) == 0: | |
2479 | raise error.ParseError( |
|
2477 | raise error.ParseError( | |
2480 | _(b'no rules provided'), |
|
2478 | _(b'no rules provided'), | |
2481 | hint=_(b'use strip extension to remove commits'), |
|
2479 | hint=_(b'use strip extension to remove commits'), | |
2482 | ) |
|
2480 | ) | |
2483 |
|
2481 | |||
2484 | drops = [drop(state, n) for n in missing] |
|
2482 | drops = [drop(state, n) for n in missing] | |
2485 | # put the in the beginning so they execute immediately and |
|
2483 | # put the in the beginning so they execute immediately and | |
2486 | # don't show in the edit-plan in the future |
|
2484 | # don't show in the edit-plan in the future | |
2487 | actions[:0] = drops |
|
2485 | actions[:0] = drops | |
2488 | elif missing: |
|
2486 | elif missing: | |
2489 | raise error.ParseError( |
|
2487 | raise error.ParseError( | |
2490 | _(b'missing rules for changeset %s') % short(missing[0]), |
|
2488 | _(b'missing rules for changeset %s') % short(missing[0]), | |
2491 | hint=_( |
|
2489 | hint=_( | |
2492 | b'use "drop %s" to discard, see also: ' |
|
2490 | b'use "drop %s" to discard, see also: ' | |
2493 | b"'hg help -e histedit.config'" |
|
2491 | b"'hg help -e histedit.config'" | |
2494 | ) |
|
2492 | ) | |
2495 | % short(missing[0]), |
|
2493 | % short(missing[0]), | |
2496 | ) |
|
2494 | ) | |
2497 |
|
2495 | |||
2498 |
|
2496 | |||
2499 | def adjustreplacementsfrommarkers(repo, oldreplacements): |
|
2497 | def adjustreplacementsfrommarkers(repo, oldreplacements): | |
2500 | """Adjust replacements from obsolescence markers |
|
2498 | """Adjust replacements from obsolescence markers | |
2501 |
|
2499 | |||
2502 | Replacements structure is originally generated based on |
|
2500 | Replacements structure is originally generated based on | |
2503 | histedit's state and does not account for changes that are |
|
2501 | histedit's state and does not account for changes that are | |
2504 | not recorded there. This function fixes that by adding |
|
2502 | not recorded there. This function fixes that by adding | |
2505 | data read from obsolescence markers""" |
|
2503 | data read from obsolescence markers""" | |
2506 | if not obsolete.isenabled(repo, obsolete.createmarkersopt): |
|
2504 | if not obsolete.isenabled(repo, obsolete.createmarkersopt): | |
2507 | return oldreplacements |
|
2505 | return oldreplacements | |
2508 |
|
2506 | |||
2509 | unfi = repo.unfiltered() |
|
2507 | unfi = repo.unfiltered() | |
2510 | get_rev = unfi.changelog.index.get_rev |
|
2508 | get_rev = unfi.changelog.index.get_rev | |
2511 | obsstore = repo.obsstore |
|
2509 | obsstore = repo.obsstore | |
2512 | newreplacements = list(oldreplacements) |
|
2510 | newreplacements = list(oldreplacements) | |
2513 | oldsuccs = [r[1] for r in oldreplacements] |
|
2511 | oldsuccs = [r[1] for r in oldreplacements] | |
2514 | # successors that have already been added to succstocheck once |
|
2512 | # successors that have already been added to succstocheck once | |
2515 | seensuccs = set().union( |
|
2513 | seensuccs = set().union( | |
2516 | *oldsuccs |
|
2514 | *oldsuccs | |
2517 | ) # create a set from an iterable of tuples |
|
2515 | ) # create a set from an iterable of tuples | |
2518 | succstocheck = list(seensuccs) |
|
2516 | succstocheck = list(seensuccs) | |
2519 | while succstocheck: |
|
2517 | while succstocheck: | |
2520 | n = succstocheck.pop() |
|
2518 | n = succstocheck.pop() | |
2521 | missing = get_rev(n) is None |
|
2519 | missing = get_rev(n) is None | |
2522 | markers = obsstore.successors.get(n, ()) |
|
2520 | markers = obsstore.successors.get(n, ()) | |
2523 | if missing and not markers: |
|
2521 | if missing and not markers: | |
2524 | # dead end, mark it as such |
|
2522 | # dead end, mark it as such | |
2525 | newreplacements.append((n, ())) |
|
2523 | newreplacements.append((n, ())) | |
2526 | for marker in markers: |
|
2524 | for marker in markers: | |
2527 | nsuccs = marker[1] |
|
2525 | nsuccs = marker[1] | |
2528 | newreplacements.append((n, nsuccs)) |
|
2526 | newreplacements.append((n, nsuccs)) | |
2529 | for nsucc in nsuccs: |
|
2527 | for nsucc in nsuccs: | |
2530 | if nsucc not in seensuccs: |
|
2528 | if nsucc not in seensuccs: | |
2531 | seensuccs.add(nsucc) |
|
2529 | seensuccs.add(nsucc) | |
2532 | succstocheck.append(nsucc) |
|
2530 | succstocheck.append(nsucc) | |
2533 |
|
2531 | |||
2534 | return newreplacements |
|
2532 | return newreplacements | |
2535 |
|
2533 | |||
2536 |
|
2534 | |||
2537 | def processreplacement(state): |
|
2535 | def processreplacement(state): | |
2538 | """process the list of replacements to return |
|
2536 | """process the list of replacements to return | |
2539 |
|
2537 | |||
2540 | 1) the final mapping between original and created nodes |
|
2538 | 1) the final mapping between original and created nodes | |
2541 | 2) the list of temporary node created by histedit |
|
2539 | 2) the list of temporary node created by histedit | |
2542 | 3) the list of new commit created by histedit""" |
|
2540 | 3) the list of new commit created by histedit""" | |
2543 | replacements = adjustreplacementsfrommarkers(state.repo, state.replacements) |
|
2541 | replacements = adjustreplacementsfrommarkers(state.repo, state.replacements) | |
2544 | allsuccs = set() |
|
2542 | allsuccs = set() | |
2545 | replaced = set() |
|
2543 | replaced = set() | |
2546 | fullmapping = {} |
|
2544 | fullmapping = {} | |
2547 | # initialize basic set |
|
2545 | # initialize basic set | |
2548 | # fullmapping records all operations recorded in replacement |
|
2546 | # fullmapping records all operations recorded in replacement | |
2549 | for rep in replacements: |
|
2547 | for rep in replacements: | |
2550 | allsuccs.update(rep[1]) |
|
2548 | allsuccs.update(rep[1]) | |
2551 | replaced.add(rep[0]) |
|
2549 | replaced.add(rep[0]) | |
2552 | fullmapping.setdefault(rep[0], set()).update(rep[1]) |
|
2550 | fullmapping.setdefault(rep[0], set()).update(rep[1]) | |
2553 | new = allsuccs - replaced |
|
2551 | new = allsuccs - replaced | |
2554 | tmpnodes = allsuccs & replaced |
|
2552 | tmpnodes = allsuccs & replaced | |
2555 | # Reduce content fullmapping into direct relation between original nodes |
|
2553 | # Reduce content fullmapping into direct relation between original nodes | |
2556 | # and final node created during history edition |
|
2554 | # and final node created during history edition | |
2557 | # Dropped changeset are replaced by an empty list |
|
2555 | # Dropped changeset are replaced by an empty list | |
2558 | toproceed = set(fullmapping) |
|
2556 | toproceed = set(fullmapping) | |
2559 | final = {} |
|
2557 | final = {} | |
2560 | while toproceed: |
|
2558 | while toproceed: | |
2561 | for x in list(toproceed): |
|
2559 | for x in list(toproceed): | |
2562 | succs = fullmapping[x] |
|
2560 | succs = fullmapping[x] | |
2563 | for s in list(succs): |
|
2561 | for s in list(succs): | |
2564 | if s in toproceed: |
|
2562 | if s in toproceed: | |
2565 | # non final node with unknown closure |
|
2563 | # non final node with unknown closure | |
2566 | # We can't process this now |
|
2564 | # We can't process this now | |
2567 | break |
|
2565 | break | |
2568 | elif s in final: |
|
2566 | elif s in final: | |
2569 | # non final node, replace with closure |
|
2567 | # non final node, replace with closure | |
2570 | succs.remove(s) |
|
2568 | succs.remove(s) | |
2571 | succs.update(final[s]) |
|
2569 | succs.update(final[s]) | |
2572 | else: |
|
2570 | else: | |
2573 | final[x] = succs |
|
2571 | final[x] = succs | |
2574 | toproceed.remove(x) |
|
2572 | toproceed.remove(x) | |
2575 | # remove tmpnodes from final mapping |
|
2573 | # remove tmpnodes from final mapping | |
2576 | for n in tmpnodes: |
|
2574 | for n in tmpnodes: | |
2577 | del final[n] |
|
2575 | del final[n] | |
2578 | # we expect all changes involved in final to exist in the repo |
|
2576 | # we expect all changes involved in final to exist in the repo | |
2579 | # turn `final` into list (topologically sorted) |
|
2577 | # turn `final` into list (topologically sorted) | |
2580 | get_rev = state.repo.changelog.index.get_rev |
|
2578 | get_rev = state.repo.changelog.index.get_rev | |
2581 | for prec, succs in final.items(): |
|
2579 | for prec, succs in final.items(): | |
2582 | final[prec] = sorted(succs, key=get_rev) |
|
2580 | final[prec] = sorted(succs, key=get_rev) | |
2583 |
|
2581 | |||
2584 | # computed topmost element (necessary for bookmark) |
|
2582 | # computed topmost element (necessary for bookmark) | |
2585 | if new: |
|
2583 | if new: | |
2586 | newtopmost = sorted(new, key=state.repo.changelog.rev)[-1] |
|
2584 | newtopmost = sorted(new, key=state.repo.changelog.rev)[-1] | |
2587 | elif not final: |
|
2585 | elif not final: | |
2588 | # Nothing rewritten at all. we won't need `newtopmost` |
|
2586 | # Nothing rewritten at all. we won't need `newtopmost` | |
2589 | # It is the same as `oldtopmost` and `processreplacement` know it |
|
2587 | # It is the same as `oldtopmost` and `processreplacement` know it | |
2590 | newtopmost = None |
|
2588 | newtopmost = None | |
2591 | else: |
|
2589 | else: | |
2592 | # every body died. The newtopmost is the parent of the root. |
|
2590 | # every body died. The newtopmost is the parent of the root. | |
2593 | r = state.repo.changelog.rev |
|
2591 | r = state.repo.changelog.rev | |
2594 | newtopmost = state.repo[sorted(final, key=r)[0]].p1().node() |
|
2592 | newtopmost = state.repo[sorted(final, key=r)[0]].p1().node() | |
2595 |
|
2593 | |||
2596 | return final, tmpnodes, new, newtopmost |
|
2594 | return final, tmpnodes, new, newtopmost | |
2597 |
|
2595 | |||
2598 |
|
2596 | |||
2599 | def movetopmostbookmarks(repo, oldtopmost, newtopmost): |
|
2597 | def movetopmostbookmarks(repo, oldtopmost, newtopmost): | |
2600 | """Move bookmark from oldtopmost to newly created topmost |
|
2598 | """Move bookmark from oldtopmost to newly created topmost | |
2601 |
|
2599 | |||
2602 | This is arguably a feature and we may only want that for the active |
|
2600 | This is arguably a feature and we may only want that for the active | |
2603 | bookmark. But the behavior is kept compatible with the old version for now. |
|
2601 | bookmark. But the behavior is kept compatible with the old version for now. | |
2604 | """ |
|
2602 | """ | |
2605 | if not oldtopmost or not newtopmost: |
|
2603 | if not oldtopmost or not newtopmost: | |
2606 | return |
|
2604 | return | |
2607 | oldbmarks = repo.nodebookmarks(oldtopmost) |
|
2605 | oldbmarks = repo.nodebookmarks(oldtopmost) | |
2608 | if oldbmarks: |
|
2606 | if oldbmarks: | |
2609 | with repo.lock(), repo.transaction(b'histedit') as tr: |
|
2607 | with repo.lock(), repo.transaction(b'histedit') as tr: | |
2610 | marks = repo._bookmarks |
|
2608 | marks = repo._bookmarks | |
2611 | changes = [] |
|
2609 | changes = [] | |
2612 | for name in oldbmarks: |
|
2610 | for name in oldbmarks: | |
2613 | changes.append((name, newtopmost)) |
|
2611 | changes.append((name, newtopmost)) | |
2614 | marks.applychanges(repo, tr, changes) |
|
2612 | marks.applychanges(repo, tr, changes) | |
2615 |
|
2613 | |||
2616 |
|
2614 | |||
2617 | def cleanupnode(ui, repo, nodes, nobackup=False): |
|
2615 | def cleanupnode(ui, repo, nodes, nobackup=False): | |
2618 | """strip a group of nodes from the repository |
|
2616 | """strip a group of nodes from the repository | |
2619 |
|
2617 | |||
2620 | The set of node to strip may contains unknown nodes.""" |
|
2618 | The set of node to strip may contains unknown nodes.""" | |
2621 | with repo.lock(): |
|
2619 | with repo.lock(): | |
2622 | # do not let filtering get in the way of the cleanse |
|
2620 | # do not let filtering get in the way of the cleanse | |
2623 | # we should probably get rid of obsolescence marker created during the |
|
2621 | # we should probably get rid of obsolescence marker created during the | |
2624 | # histedit, but we currently do not have such information. |
|
2622 | # histedit, but we currently do not have such information. | |
2625 | repo = repo.unfiltered() |
|
2623 | repo = repo.unfiltered() | |
2626 | # Find all nodes that need to be stripped |
|
2624 | # Find all nodes that need to be stripped | |
2627 | # (we use %lr instead of %ln to silently ignore unknown items) |
|
2625 | # (we use %lr instead of %ln to silently ignore unknown items) | |
2628 | has_node = repo.changelog.index.has_node |
|
2626 | has_node = repo.changelog.index.has_node | |
2629 | nodes = sorted(n for n in nodes if has_node(n)) |
|
2627 | nodes = sorted(n for n in nodes if has_node(n)) | |
2630 | roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)] |
|
2628 | roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)] | |
2631 | if roots: |
|
2629 | if roots: | |
2632 | backup = not nobackup |
|
2630 | backup = not nobackup | |
2633 | repair.strip(ui, repo, roots, backup=backup) |
|
2631 | repair.strip(ui, repo, roots, backup=backup) | |
2634 |
|
2632 | |||
2635 |
|
2633 | |||
2636 | def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs): |
|
2634 | def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs): | |
2637 | if isinstance(nodelist, bytes): |
|
2635 | if isinstance(nodelist, bytes): | |
2638 | nodelist = [nodelist] |
|
2636 | nodelist = [nodelist] | |
2639 | state = histeditstate(repo) |
|
2637 | state = histeditstate(repo) | |
2640 | if state.inprogress(): |
|
2638 | if state.inprogress(): | |
2641 | state.read() |
|
2639 | state.read() | |
2642 | histedit_nodes = { |
|
2640 | histedit_nodes = { | |
2643 | action.node for action in state.actions if action.node |
|
2641 | action.node for action in state.actions if action.node | |
2644 | } |
|
2642 | } | |
2645 | common_nodes = histedit_nodes & set(nodelist) |
|
2643 | common_nodes = histedit_nodes & set(nodelist) | |
2646 | if common_nodes: |
|
2644 | if common_nodes: | |
2647 | raise error.Abort( |
|
2645 | raise error.Abort( | |
2648 | _(b"histedit in progress, can't strip %s") |
|
2646 | _(b"histedit in progress, can't strip %s") | |
2649 | % b', '.join(short(x) for x in common_nodes) |
|
2647 | % b', '.join(short(x) for x in common_nodes) | |
2650 | ) |
|
2648 | ) | |
2651 | return orig(ui, repo, nodelist, *args, **kwargs) |
|
2649 | return orig(ui, repo, nodelist, *args, **kwargs) | |
2652 |
|
2650 | |||
2653 |
|
2651 | |||
2654 | extensions.wrapfunction(repair, 'strip', stripwrapper) |
|
2652 | extensions.wrapfunction(repair, 'strip', stripwrapper) | |
2655 |
|
2653 | |||
2656 |
|
2654 | |||
2657 | def summaryhook(ui, repo): |
|
2655 | def summaryhook(ui, repo): | |
2658 | state = histeditstate(repo) |
|
2656 | state = histeditstate(repo) | |
2659 | if not state.inprogress(): |
|
2657 | if not state.inprogress(): | |
2660 | return |
|
2658 | return | |
2661 | state.read() |
|
2659 | state.read() | |
2662 | if state.actions: |
|
2660 | if state.actions: | |
2663 | # i18n: column positioning for "hg summary" |
|
2661 | # i18n: column positioning for "hg summary" | |
2664 | ui.write( |
|
2662 | ui.write( | |
2665 | _(b'hist: %s (histedit --continue)\n') |
|
2663 | _(b'hist: %s (histedit --continue)\n') | |
2666 | % ( |
|
2664 | % ( | |
2667 | ui.label(_(b'%d remaining'), b'histedit.remaining') |
|
2665 | ui.label(_(b'%d remaining'), b'histedit.remaining') | |
2668 | % len(state.actions) |
|
2666 | % len(state.actions) | |
2669 | ) |
|
2667 | ) | |
2670 | ) |
|
2668 | ) | |
2671 |
|
2669 | |||
2672 |
|
2670 | |||
2673 | def extsetup(ui): |
|
2671 | def extsetup(ui): | |
2674 | cmdutil.summaryhooks.add(b'histedit', summaryhook) |
|
2672 | cmdutil.summaryhooks.add(b'histedit', summaryhook) | |
2675 | statemod.addunfinished( |
|
2673 | statemod.addunfinished( | |
2676 | b'histedit', |
|
2674 | b'histedit', | |
2677 | fname=b'histedit-state', |
|
2675 | fname=b'histedit-state', | |
2678 | allowcommit=True, |
|
2676 | allowcommit=True, | |
2679 | continueflag=True, |
|
2677 | continueflag=True, | |
2680 | abortfunc=hgaborthistedit, |
|
2678 | abortfunc=hgaborthistedit, | |
2681 | ) |
|
2679 | ) |
@@ -1,668 +1,670 b'' | |||||
1 | # nodemap.py - nodemap related code and utilities |
|
1 | # nodemap.py - nodemap related code and utilities | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net> |
|
3 | # Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net> | |
4 | # Copyright 2019 George Racinet <georges.racinet@octobus.net> |
|
4 | # Copyright 2019 George Racinet <georges.racinet@octobus.net> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 |
|
9 | |||
10 | import re |
|
10 | import re | |
11 | import struct |
|
11 | import struct | |
12 |
|
12 | |||
13 | from ..node import hex |
|
13 | from ..node import hex | |
14 |
|
14 | |||
15 | from .. import ( |
|
15 | from .. import ( | |
16 | error, |
|
16 | error, | |
17 | requirements, |
|
17 | requirements, | |
18 | util, |
|
18 | util, | |
19 | ) |
|
19 | ) | |
20 | from . import docket as docket_mod |
|
20 | from . import docket as docket_mod | |
21 |
|
21 | |||
22 |
|
22 | |||
class NodeMap(dict):
    """A node-to-revision mapping that raises a revlog error on unknown
    nodes instead of a plain ``KeyError``."""

    def __missing__(self, key):
        raise error.RevlogError(b'unknown node: %s' % key)
26 |
|
26 | |||
27 |
|
27 | |||
def test_race_hook_1():
    """Hook point for tests.

    Lets tests make things happen between the docket read and the data
    read; intentionally does nothing in production.
    """
34 |
|
34 | |||
35 |
|
35 | |||
def post_stream_cleanup(repo):
    """Remove stale persistent-nodemap files after a stream clone.

    A stream clone may drop the persistent-nodemap requirement while its
    files are still around; delete them for the changelog and the root
    manifest revlog in that case.
    """
    reqs = repo.requirements
    if requirements.REVLOGV1_REQUIREMENT not in reqs:
        return
    if requirements.NODEMAP_REQUIREMENT in reqs:
        return
    unfi = repo.unfiltered()
    delete_nodemap(None, unfi, unfi.changelog)
    delete_nodemap(None, repo, unfi.manifestlog._rootstore._revlog)
47 |
|
47 | |||
48 |
|
48 | |||
def persisted_data(revlog):
    """Read the persistent nodemap for *revlog* from disk.

    Returns a ``(docket, data)`` pair, or ``None`` when no usable
    on-disk nodemap exists (feature disabled, missing file, unknown
    version, or truncated data).
    """
    if revlog._nodemap_file is None:
        return None
    pdata = revlog.opener.tryread(revlog._nodemap_file)
    if not pdata:
        return None

    # decode the docket: version byte, fixed header, then uid + tip node
    offset = 0
    (version,) = S_VERSION.unpack_from(pdata, offset)
    if version != ONDISK_VERSION:
        return None
    offset += S_VERSION.size
    header = S_HEADER.unpack_from(pdata, offset)
    uid_size, tip_rev, data_length, data_unused, tip_node_size = header
    offset += S_HEADER.size
    docket = NodeMapDocket(pdata[offset : offset + uid_size])
    offset += uid_size
    docket.tip_rev = tip_rev
    docket.tip_node = pdata[offset : offset + tip_node_size]
    docket.data_length = data_length
    docket.data_unused = data_unused

    filename = _rawdata_filepath(revlog, docket)
    use_mmap = revlog.opener.options.get(b"persistent-nodemap.mmap")

    test_race_hook_1()
    try:
        with revlog.opener(filename) as fd:
            if use_mmap:
                try:
                    data = util.buffer(util.mmapread(fd, data_length))
                except ValueError:
                    # raised when the file is too small to be mmap'd at
                    # that length
                    data = b''
            else:
                data = fd.read(data_length)
    except FileNotFoundError:
        return None
    if len(data) < data_length:
        # truncated data file: the docket cannot be trusted
        return None
    return docket, data
90 |
|
90 | |||
91 |
|
91 | |||
def setup_persistent_nodemap(tr, revlog):
    """Install whatever is needed transaction side to persist a nodemap
    on disk.

    This only actually persists the nodemap when it is relevant for this
    revlog (not inline, and with the feature enabled).
    """
    if revlog._inline:
        return  # inlined revlogs are too small for this to be relevant
    if revlog._nodemap_file is None:
        return  # we do not use persistent_nodemap on this revlog

    # we need to happen after the changelog finalization, in that use "cl-"
    callback_id = b"nm-revlog-persistent-nodemap-%s" % revlog._nodemap_file
    if tr.hasfinalize(callback_id):
        return  # already registered for this transaction

    def _persist_pending(tr):
        persist_nodemap(tr, revlog, pending=True)

    def _persist_final(tr):
        persist_nodemap(tr, revlog)

    tr.addpending(callback_id, _persist_pending)
    tr.addfinalize(callback_id, _persist_final)
110 |
|
110 | |||
111 |
|
111 | |||
112 | class _NoTransaction: |
|
112 | class _NoTransaction: | |
113 | """transaction like object to update the nodemap outside a transaction""" |
|
113 | """transaction like object to update the nodemap outside a transaction""" | |
114 |
|
114 | |||
115 | def __init__(self): |
|
115 | def __init__(self): | |
116 | self._postclose = {} |
|
116 | self._postclose = {} | |
117 |
|
117 | |||
118 | def addpostclose(self, callback_id, callback_func): |
|
118 | def addpostclose(self, callback_id, callback_func): | |
119 | self._postclose[callback_id] = callback_func |
|
119 | self._postclose[callback_id] = callback_func | |
120 |
|
120 | |||
121 | def registertmp(self, *args, **kwargs): |
|
121 | def registertmp(self, *args, **kwargs): | |
122 | pass |
|
122 | pass | |
123 |
|
123 | |||
124 | def addbackup(self, *args, **kwargs): |
|
124 | def addbackup(self, *args, **kwargs): | |
125 | pass |
|
125 | pass | |
126 |
|
126 | |||
127 | def add(self, *args, **kwargs): |
|
127 | def add(self, *args, **kwargs): | |
128 | pass |
|
128 | pass | |
129 |
|
129 | |||
130 | def addabort(self, *args, **kwargs): |
|
130 | def addabort(self, *args, **kwargs): | |
131 | pass |
|
131 | pass | |
132 |
|
132 | |||
133 | def _report(self, *args): |
|
133 | def _report(self, *args): | |
134 | pass |
|
134 | pass | |
135 |
|
135 | |||
136 |
|
136 | |||
def update_persistent_nodemap(revlog):
    """Update the persistent nodemap right now.

    To be used when updating the nodemap on disk outside of a normal
    transaction setup (eg, `debugupdatecache`).
    """
    if revlog._inline:
        return  # inlined revlogs are too small for this to be relevant
    if revlog._nodemap_file is None:
        return  # we do not use persistent_nodemap on this revlog

    notr = _NoTransaction()
    persist_nodemap(notr, revlog)
    # replay the post-close callbacks the persist step registered
    for callback_id in sorted(notr._postclose):
        notr._postclose[callback_id](None)
152 |
|
152 | |||
153 |
|
153 | |||
def delete_nodemap(tr, repo, revlog):
    """Delete all on-disk nodemap files for a given revlog."""
    prefix = revlog.radix
    # matches the rawdata file, the docket, and its pending (.a) variant
    pattern = re.compile(br"(^|/)%s(-[0-9a-f]+\.nd|\.n(\.a)?)$" % prefix)
    dirpath = revlog.opener.dirname(revlog._indexfile)
    doomed = [f for f in revlog.opener.listdir(dirpath) if pattern.match(f)]
    for f in doomed:
        repo.svfs.tryunlink(f)
162 |
|
162 | |||
163 |
|
163 | |||
def persist_nodemap(tr, revlog, pending=False, force=False):
    """Write nodemap data on disk for a given revlog.

    ``tr`` is the transaction (or transaction-like object) used to
    register file writes and cleanup callbacks. With ``pending=True``
    the docket is written to a temporary ``.a`` file instead of its
    final location. With ``force=True`` a nodemap file is assigned to a
    revlog without the feature enabled, instead of raising.
    """
    if len(revlog.index) <= 0:
        # an empty revlog has no tip revision/node to record in a docket
        return
    if getattr(revlog, 'filteredrevs', ()):
        raise error.ProgrammingError(
            "cannot persist nodemap of a filtered changelog"
        )
    if revlog._nodemap_file is None:
        if force:
            revlog._nodemap_file = get_nodemap_file(revlog)
        else:
            msg = "calling persist nodemap on a revlog without the feature enabled"
            raise error.ProgrammingError(msg)

    can_incremental = hasattr(revlog.index, "nodemap_data_incremental")
    ondisk_docket = revlog._nodemap_docket
    feed_data = hasattr(revlog.index, "update_nodemap_data")
    use_mmap = revlog.opener.options.get(b"persistent-nodemap.mmap")

    data = None
    # first attempt an incremental update of the existing data
    if can_incremental and ondisk_docket is not None:
        target_docket = revlog._nodemap_docket.copy()
        (
            src_docket,
            data_changed_count,
            data,
        ) = revlog.index.nodemap_data_incremental()
        new_length = target_docket.data_length + len(data)
        new_unused = target_docket.data_unused + data_changed_count
        if src_docket != target_docket:
            # the on-disk data no longer matches what the index was fed:
            # fall back to a full rewrite
            data = None
        elif new_length <= (new_unused * 10):
            # dead data would be >= ~10% of the file: a full rewrite is
            # cheaper in the long run
            data = None
        else:
            datafile = _rawdata_filepath(revlog, target_docket)
            # EXP-TODO: if this is a cache, this should use a cache vfs, not a
            # store vfs
            tr.add(datafile, target_docket.data_length)
            with revlog.opener(datafile, b'r+') as fd:
                fd.seek(target_docket.data_length)
                fd.write(data)
                if feed_data:
                    # NOTE(fix): mmap the file only when the mmap option
                    # is set, matching how `persisted_data` reads it back;
                    # the two branches were previously inverted.
                    if use_mmap:
                        fd.flush()
                        new_data = util.buffer(util.mmapread(fd, new_length))
                    else:
                        fd.seek(0)
                        new_data = fd.read(new_length)
            target_docket.data_length = new_length
            target_docket.data_unused = new_unused

    if data is None:
        # otherwise fallback to a full new export
        target_docket = NodeMapDocket()
        datafile = _rawdata_filepath(revlog, target_docket)
        if hasattr(revlog.index, "nodemap_data_all"):
            data = revlog.index.nodemap_data_all()
        else:
            data = persistent_data(revlog.index)
        # EXP-TODO: if this is a cache, this should use a cache vfs, not a
        # store vfs

        tryunlink = revlog.opener.tryunlink

        def abortck(tr):
            tryunlink(datafile)

        callback_id = b"delete-%s" % datafile

        # some flavor of the transaction abort does not cleanup new file, it
        # simply empty them.
        tr.addabort(callback_id, abortck)
        with revlog.opener(datafile, b'w+') as fd:
            fd.write(data)
            if feed_data:
                # NOTE(fix): see the incremental branch above — mmap only
                # when requested, otherwise keep the in-memory bytes.
                if use_mmap:
                    fd.flush()
                    new_data = util.buffer(util.mmapread(fd, len(data)))
                else:
                    new_data = data
        target_docket.data_length = len(data)
    target_docket.tip_rev = revlog.tiprev()
    target_docket.tip_node = revlog.node(target_docket.tip_rev)
    # EXP-TODO: if this is a cache, this should use a cache vfs, not a
    # store vfs
    file_path = revlog._nodemap_file
    if pending:
        file_path += b'.a'
        tr.registertmp(file_path)
    else:
        tr.addbackup(file_path)

    with revlog.opener(file_path, b'w', atomictemp=True) as fp:
        fp.write(target_docket.serialize())
    revlog._nodemap_docket = target_docket
    if feed_data:
        revlog.index.update_nodemap_data(target_docket, new_data)

    # search for old data files in all cases: some older process might
    # have left one behind.
    olds = _other_rawdata_filepath(revlog, target_docket)
    if olds:
        realvfs = getattr(revlog, '_realopener', revlog.opener)

        def cleanup(tr):
            for oldfile in olds:
                realvfs.tryunlink(oldfile)

        callback_id = b"revlog-cleanup-nodemap-%s" % revlog._nodemap_file
        tr.addpostclose(callback_id, cleanup)
274 |
|
276 | |||
275 |
|
277 | |||
### Nodemap docket file
#
# The nodemap data are stored on disk using 2 files:
#
# * a raw data file containing a persistent nodemap
#   (see the `Nodemap Trie` section)
#
# * a small "docket" file containing metadata
#
# While the nodemap data can be multiple tens of megabytes, the "docket" is
# small: it is easy to update it atomically or to duplicate its content
# during a transaction.
#
# Multiple raw data files can exist at the same time (the currently valid
# one and a new one being built by an in-progress transaction). To
# accommodate this, the filename hosting the raw data has a variable part.
# The exact filename is specified inside the "docket" file.
#
# The docket file contains information to find, qualify and validate the
# raw data. Its content is currently very light, but it will expand as the
# on-disk nodemap gains the necessary features to be used in production.

# on-disk format version (single byte at the start of the docket)
ONDISK_VERSION = 1
S_VERSION = struct.Struct(">B")
# uid_size, tip_rev, data_length, data_unused, tip_node_size
S_HEADER = struct.Struct(">BQQQQ")
301 |
|
303 | |||
302 |
|
304 | |||
class NodeMapDocket:
    """Metadata associated with persistent nodemap data.

    The persistent data may come from disk or be on their way to disk.
    """

    def __init__(self, uid=None):
        if uid is None:
            uid = docket_mod.make_uid()
        # Unique identifier of the data file:
        # - preserved when new data are appended,
        # - regenerated when a new data file is created.
        self.uid = uid
        # Tipmost revision stored in the data file. That revision and all
        # revisions before it are expected to be encoded in the file.
        self.tip_rev = None
        # Node of the tipmost revision. If it does not match the current
        # index data, the docket is not valid for the current index and
        # should be discarded.
        #
        # note: this check is not perfect, as some destructive operations
        # could preserve the same tip_rev + tip_node while altering lower
        # revisions. However multiple other caches share the same
        # vulnerability (eg: the branchmap cache).
        self.tip_node = None
        # Size (in bytes) of the persisted data valid for `tip_rev`:
        # - data files shorter than this are corrupted,
        # - any extra data should be ignored.
        self.data_length = None
        # Amount (in bytes) of "dead" data: still in the data file but no
        # longer used by the nodemap.
        self.data_unused = 0

    def copy(self):
        clone = NodeMapDocket(uid=self.uid)
        clone.tip_rev = self.tip_rev
        clone.tip_node = self.tip_node
        clone.data_length = self.data_length
        clone.data_unused = self.data_unused
        return clone

    def __cmp__(self, other):
        # python2-style three-way comparison, ordered by (uid, data_length)
        if self.uid < other.uid:
            return -1
        if self.uid > other.uid:
            return 1
        if self.data_length < other.data_length:
            return -1
        if self.data_length > other.data_length:
            return 1
        return 0

    def __eq__(self, other):
        return self.uid == other.uid and self.data_length == other.data_length

    def serialize(self):
        """return serialized bytes for a docket using the passed uid"""
        header = S_HEADER.pack(
            len(self.uid),
            self.tip_rev,
            self.data_length,
            self.data_unused,
            len(self.tip_node),
        )
        parts = [
            S_VERSION.pack(ONDISK_VERSION),
            header,
            self.uid,
            self.tip_node,
        ]
        return b''.join(parts)
374 |
|
376 | |||
375 |
|
377 | |||
376 | def _rawdata_filepath(revlog, docket): |
|
378 | def _rawdata_filepath(revlog, docket): | |
377 | """The (vfs relative) nodemap's rawdata file for a given uid""" |
|
379 | """The (vfs relative) nodemap's rawdata file for a given uid""" | |
378 | prefix = revlog.radix |
|
380 | prefix = revlog.radix | |
379 | return b"%s-%s.nd" % (prefix, docket.uid) |
|
381 | return b"%s-%s.nd" % (prefix, docket.uid) | |
380 |
|
382 | |||
381 |
|
383 | |||
def _other_rawdata_filepath(revlog, docket):
    """List rawdata files for this revlog other than *docket*'s own one.

    Such files are leftovers from older processes and can be deleted.
    """
    prefix = revlog.radix
    pattern = re.compile(br"(^|/)%s-[0-9a-f]+\.nd$" % prefix)
    new_file_path = _rawdata_filepath(revlog, docket)
    new_file_name = revlog.opener.basename(new_file_path)
    dirpath = revlog.opener.dirname(new_file_path)
    return [
        f
        for f in revlog.opener.listdir(dirpath)
        if f != new_file_name and pattern.match(f)
    ]
393 |
|
395 | |||
394 |
|
396 | |||
### Nodemap Trie
#
# This is a simple reference implementation to compute and persist a nodemap
# trie. This reference implementation is write only. The python version of
# this is not expected to be actually used, since it won't provide a
# performance improvement over the existing non-persistent C implementation.
#
# The nodemap is persisted as a Trie using 4bits-address/16-entries blocks.
# Each revision can be addressed using its node's shortest prefix.
#
# The trie is stored as a sequence of blocks. Each block contains 16 entries
# (signed 64bit integer, big endian). Each entry can be one of the following:
#
# * value >= 0 -> index of sub-block
# * value == -1 -> no value
# * value < -1 -> encoded revision: rev = -(value+2)
#
# See REV_OFFSET and _transform_rev below.
#
# The implementation focuses on simplicity, not on performance. A Rust
# implementation should provide an efficient version of the same binary
# persistence. This reference python implementation is never meant to be
# extensively used in production.
418 |
|
420 | |||
419 |
|
421 | |||
def persistent_data(index):
    """Return the full persistent binary form of the nodemap for `index`.

    Builds the whole trie from scratch and serializes every block of it.
    """
    return _persist_trie(_build_trie(index))
424 |
|
426 | |||
425 |
|
427 | |||
def update_persistent_data(index, root, max_idx, last_rev):
    """Return the incremental update for a persistent nodemap.

    index: the revlog index to read new revisions from
    root: the in-memory trie root, already covering revisions up to `last_rev`
    max_idx: index of the last block already persisted on disk
    last_rev: last revision already present in the trie

    Returns a ``(changed_byte_count, new_data)`` pair.
    """
    changed_blocks, trie = _update_trie(index, root, last_rev)
    serialized = _persist_trie(trie, existing_idx=max_idx)
    return (changed_blocks * S_BLOCK.size, serialized)
433 |
|
435 | |||
434 |
|
436 | |||
435 | S_BLOCK = struct.Struct(">" + ("l" * 16)) |
|
437 | S_BLOCK = struct.Struct(">" + ("l" * 16)) | |
436 |
|
438 | |||
437 | NO_ENTRY = -1 |
|
439 | NO_ENTRY = -1 | |
438 | # rev 0 need to be -2 because 0 is used by block, -1 is a special value. |
|
440 | # rev 0 need to be -2 because 0 is used by block, -1 is a special value. | |
439 | REV_OFFSET = 2 |
|
441 | REV_OFFSET = 2 | |
440 |
|
442 | |||
441 |
|
443 | |||
442 | def _transform_rev(rev): |
|
444 | def _transform_rev(rev): | |
443 | """Return the number used to represent the rev in the tree. |
|
445 | """Return the number used to represent the rev in the tree. | |
444 |
|
446 | |||
445 | (or retrieve a rev number from such representation) |
|
447 | (or retrieve a rev number from such representation) | |
446 |
|
448 | |||
447 | Note that this is an involution, a function equal to its inverse (i.e. |
|
449 | Note that this is an involution, a function equal to its inverse (i.e. | |
448 | which gives the identity when applied to itself). |
|
450 | which gives the identity when applied to itself). | |
449 | """ |
|
451 | """ | |
450 | return -(rev + REV_OFFSET) |
|
452 | return -(rev + REV_OFFSET) | |
451 |
|
453 | |||
452 |
|
454 | |||
453 | def _to_int(hex_digit): |
|
455 | def _to_int(hex_digit): | |
454 | """turn an hexadecimal digit into a proper integer""" |
|
456 | """turn an hexadecimal digit into a proper integer""" | |
455 | return int(hex_digit, 16) |
|
457 | return int(hex_digit, 16) | |
456 |
|
458 | |||
457 |
|
459 | |||
class Block(dict):
    """One trie block: a sparse mapping from nibble (0-15) to entry.

    Iterating a Block always yields exactly 16 items, one per possible
    nibble, with ``None`` for absent entries.
    """

    def __init__(self):
        super(Block, self).__init__()
        # Identifier of this block in the on-disk file when it has already
        # been persisted; ``None`` for in-memory-only (new or dirty) blocks.
        self.ondisk_id = None

    def __iter__(self):
        for nibble in range(16):
            yield self.get(nibble)
470 |
|
472 | |||
471 |
|
473 | |||
def _build_trie(index):
    """build a nodemap trie

    The nodemap stores revision number for each unique prefix.

    Each block is a dictionary with keys in `[0, 15]`. Values are either
    another block or a revision number.
    """
    root = Block()
    for rev in range(len(index)):
        # index[rev][7] is the binary node id of the revision; `hex` here is
        # presumably mercurial's node-to-hex helper, not the builtin — confirm
        # against the module imports.
        current_hex = hex(index[rev][7])
        _insert_into_block(index, 0, root, rev, current_hex)
    return root
|
486 | return root | |
485 |
|
487 | |||
486 |
|
488 | |||
def _update_trie(index, root, last_rev):
    """Incrementally insert revisions newer than `last_rev` into the trie.

    Returns a ``(changed, root)`` pair where ``changed`` is the number of
    block updates reported by the insertions.
    """
    changed = 0
    for rev in range(last_rev + 1, len(index)):
        # binary node id -> hex, same helper as in _build_trie
        current_hex = hex(index[rev][7])
        changed += _insert_into_block(index, 0, root, rev, current_hex)
    return changed, root
494 |
|
496 | |||
495 |
|
497 | |||
def _insert_into_block(index, level, block, current_rev, current_hex):
    """insert a new revision in a block

    index: the index we are adding revision for
    level: the depth of the current block in the trie
    block: the block currently being considered
    current_rev: the revision number we are adding
    current_hex: the hexadecimal representation of the node of that revision

    Returns the number of blocks counted as changed along the insertion
    path.
    """
    # the current block is touched in every case below
    changed = 1
    if block.ondisk_id is not None:
        # the block diverges from its persisted version; drop the on-disk
        # reference so it gets re-serialized
        block.ondisk_id = None
    # one hex digit (nibble) of the node is consumed per trie level
    hex_digit = _to_int(current_hex[level : level + 1])
    entry = block.get(hex_digit)
    if entry is None:
        # no entry, simply store the revision number
        block[hex_digit] = current_rev
    elif isinstance(entry, dict):
        # need to recurse to an underlying block
        changed += _insert_into_block(
            index, level + 1, entry, current_rev, current_hex
        )
    else:
        # collision with a previously unique prefix, inserting new
        # vertices to fit both entry.
        other_hex = hex(index[entry][7])
        other_rev = entry
        new = Block()
        block[hex_digit] = new
        # NOTE(review): the two insertions below are deliberately not added
        # to `changed` — presumably because brand-new blocks were never on
        # disk and only previously-persisted blocks matter for the caller's
        # changed-size bookkeeping; confirm against update_persistent_data.
        _insert_into_block(index, level + 1, new, other_rev, other_hex)
        _insert_into_block(index, level + 1, new, current_rev, current_hex)
    return changed
|
529 | return changed | |
528 |
|
530 | |||
529 |
|
531 | |||
def _persist_trie(root, existing_idx=None):
    """Serialize a nodemap trie into its binary on-disk form.

    `existing_idx` is the index of the last block already persisted on
    disk; newly serialized blocks are numbered after it.  See
    `_build_trie` for the trie structure.
    """
    block_map = {}
    base_idx = 0 if existing_idx is None else existing_idx + 1
    chunks = []
    for node in _walk_trie(root):
        if node.ondisk_id is not None:
            # unchanged block already on disk: just record its id
            block_map[id(node)] = node.ondisk_id
        else:
            # assign the next free block id, then serialize; children are
            # walked before parents, so their ids are already in block_map
            block_map[id(node)] = len(chunks) + base_idx
            chunks.append(_persist_block(node, block_map))
    return b''.join(chunks)
|
548 | return b''.join(chunks) | |
547 |
|
549 | |||
548 |
|
550 | |||
549 | def _walk_trie(block): |
|
551 | def _walk_trie(block): | |
550 | """yield all the block in a trie |
|
552 | """yield all the block in a trie | |
551 |
|
553 | |||
552 | Children blocks are always yield before their parent block. |
|
554 | Children blocks are always yield before their parent block. | |
553 | """ |
|
555 | """ | |
554 | for (__, item) in sorted(block.items()): |
|
556 | for (__, item) in sorted(block.items()): | |
555 | if isinstance(item, dict): |
|
557 | if isinstance(item, dict): | |
556 | for sub_block in _walk_trie(item): |
|
558 | for sub_block in _walk_trie(item): | |
557 | yield sub_block |
|
559 | yield sub_block | |
558 | yield block |
|
560 | yield block | |
559 |
|
561 | |||
560 |
|
562 | |||
def _persist_block(block_node, block_map):
    """Serialize a single block into its binary form.

    Children of the block must already be persisted and registered in
    `block_map` (keyed by ``id()`` of the child block).
    """
    values = [_to_value(entry, block_map) for entry in block_node]
    return S_BLOCK.pack(*values)
570 | return S_BLOCK.pack(*data) | |
569 |
|
571 | |||
570 |
|
572 | |||
def _to_value(item, block_map):
    """Encode one block entry as its on-disk integer representation."""
    if item is None:
        # absent entry
        return NO_ENTRY
    if isinstance(item, dict):
        # sub-block: stored as its (non-negative) block index
        return block_map[id(item)]
    # revision number: stored using the negative offset encoding
    return _transform_rev(item)
580 | return _transform_rev(item) | |
579 |
|
581 | |||
580 |
|
582 | |||
def parse_data(data):
    """parse nodemap data into a nodemap Trie

    Returns a ``(root, last_block_index)`` pair; ``last_block_index`` is
    ``None`` when `data` is empty.
    """
    if (len(data) % S_BLOCK.size) != 0:
        msg = b"nodemap data size is not a multiple of block size (%d): %d"
        raise error.Abort(msg % (S_BLOCK.size, len(data)))
    if not data:
        return Block(), None
    block_map = {}
    new_blocks = []
    # first pass: materialize every block and decode its raw values;
    # linking is deferred so forward references between blocks resolve
    for i in range(0, len(data), S_BLOCK.size):
        block = Block()
        block.ondisk_id = len(block_map)
        block_map[block.ondisk_id] = block
        block_data = data[i : i + S_BLOCK.size]
        values = S_BLOCK.unpack(block_data)
        new_blocks.append((block, values))
    # second pass: turn raw integers into links or revision numbers
    for b, values in new_blocks:
        for idx, v in enumerate(values):
            if v == NO_ENTRY:
                continue
            elif v >= 0:
                # non-negative value: index of a sub-block
                b[idx] = block_map[v]
            else:
                # value < -1: encoded revision number
                b[idx] = _transform_rev(v)
    # children are persisted before their parent, so the root is the last
    # block read; `block` and `i` deliberately leak from the first loop.
    return block, i // S_BLOCK.size
607 | return block, i // S_BLOCK.size | |
606 |
|
608 | |||
607 |
|
609 | |||
608 | # debug utility |
|
610 | # debug utility | |
609 |
|
611 | |||
610 |
|
612 | |||
def check_data(ui, index, data):
    """verify that the provided nodemap data are valid for the given index

    Findings are reported through `ui`; returns 0 when the data is
    consistent with the index, 1 otherwise.
    """
    ret = 0
    ui.status((b"revisions in index: %d\n") % len(index))
    root, __ = parse_data(data)
    all_revs = set(_all_revisions(root))
    ui.status((b"revisions in nodemap: %d\n") % len(all_revs))
    for r in range(len(index)):
        if r not in all_revs:
            msg = b"  revision missing from nodemap: %d\n" % r
            ui.write_err(msg)
            ret = 1
        else:
            # consume the revision so leftovers can be reported as extras
            all_revs.remove(r)
            # `hex` is presumably the node-to-hex helper imported by this
            # module (not the builtin) — confirm against the imports.
            nm_rev = _find_node(root, hex(index[r][7]))
            if nm_rev is None:
                msg = b"  revision node does not match any entries: %d\n" % r
                ui.write_err(msg)
                ret = 1
            elif nm_rev != r:
                msg = (
                    b"  revision node does not match the expected revision: "
                    b"%d != %d\n" % (r, nm_rev)
                )
                ui.write_err(msg)
                ret = 1

    if all_revs:
        # anything left was found in the nodemap but not in the index
        for r in sorted(all_revs):
            msg = b"  extra revisions in nodemap: %d\n" % r
            ui.write_err(msg)
            ret = 1
    return ret
645 | return ret | |
644 |
|
646 | |||
645 |
|
647 | |||
def _all_revisions(root):
    """Yield every revision number stored in a trie."""
    for block in _walk_trie(root):
        for entry in block:
            # skip empty slots and links to sub-blocks
            if entry is not None and not isinstance(entry, Block):
                yield entry
|
654 | yield v | |
653 |
|
655 | |||
654 |
|
656 | |||
def _find_node(block, node):
    """Return the revision associated with `node`, or ``None`` if absent.

    `node` is the hexadecimal representation of the node; one hex digit
    is consumed per trie level.
    """
    current = block
    remaining = node
    while True:
        entry = current.get(_to_int(remaining[0:1]))
        if not isinstance(entry, dict):
            # leaf reached: a revision number or None
            return entry
        current = entry
        remaining = remaining[1:]
662 | return entry | |
661 |
|
663 | |||
662 |
|
664 | |||
def get_nodemap_file(revlog):
    """Return the path of the nodemap file to use for `revlog`.

    When transaction-pending data should be honored and a pending nodemap
    file exists, that file is preferred over the final one.
    """
    final_path = revlog.radix + b".n"
    if not revlog._trypending:
        return final_path
    pending_path = revlog.radix + b".n.a"
    if revlog.opener.exists(pending_path):
        return pending_path
    return final_path
|
670 | return revlog.radix + b".n" |
General Comments 0
You need to be logged in to leave comments.
Login now