##// END OF EJS Templates
fix: move handling of --all into getrevstofix() for consistency...
Martin von Zweigbergk -
r45063:9f5e94bb default
parent child Browse files
Show More
@@ -1,863 +1,864 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:pattern=set:**.cpp or **.hpp
18 clang-format:pattern=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. Any output on standard error
22 fixed file content is expected on standard output. Any output on standard error
23 will be displayed as a warning. If the exit status is not zero, the file will
23 will be displayed as a warning. If the exit status is not zero, the file will
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 status but no standard error output. Some values may be substituted into the
25 status but no standard error output. Some values may be substituted into the
26 command::
26 command::
27
27
28 {rootpath} The path of the file being fixed, relative to the repo root
28 {rootpath} The path of the file being fixed, relative to the repo root
29 {basename} The name of the file being fixed, without the directory path
29 {basename} The name of the file being fixed, without the directory path
30
30
31 If the :linerange suboption is set, the tool will only be run if there are
31 If the :linerange suboption is set, the tool will only be run if there are
32 changed lines in a file. The value of this suboption is appended to the shell
32 changed lines in a file. The value of this suboption is appended to the shell
33 command once for every range of changed lines in the file. Some values may be
33 command once for every range of changed lines in the file. Some values may be
34 substituted into the command::
34 substituted into the command::
35
35
36 {first} The 1-based line number of the first line in the modified range
36 {first} The 1-based line number of the first line in the modified range
37 {last} The 1-based line number of the last line in the modified range
37 {last} The 1-based line number of the last line in the modified range
38
38
39 Deleted sections of a file will be ignored by :linerange, because there is no
39 Deleted sections of a file will be ignored by :linerange, because there is no
40 corresponding line range in the version being fixed.
40 corresponding line range in the version being fixed.
41
41
42 By default, tools that set :linerange will only be executed if there is at least
42 By default, tools that set :linerange will only be executed if there is at least
43 one changed line range. This is meant to prevent accidents like running a code
43 one changed line range. This is meant to prevent accidents like running a code
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
46 to false.
46 to false.
47
47
48 The :pattern suboption determines which files will be passed through each
48 The :pattern suboption determines which files will be passed through each
49 configured tool. See :hg:`help patterns` for possible values. However, all
49 configured tool. See :hg:`help patterns` for possible values. However, all
50 patterns are relative to the repo root, even if that text says they are relative
50 patterns are relative to the repo root, even if that text says they are relative
51 to the current working directory. If there are file arguments to :hg:`fix`, the
51 to the current working directory. If there are file arguments to :hg:`fix`, the
52 intersection of these patterns is used.
52 intersection of these patterns is used.
53
53
54 There is also a configurable limit for the maximum size of file that will be
54 There is also a configurable limit for the maximum size of file that will be
55 processed by :hg:`fix`::
55 processed by :hg:`fix`::
56
56
57 [fix]
57 [fix]
58 maxfilesize = 2MB
58 maxfilesize = 2MB
59
59
60 Normally, execution of configured tools will continue after a failure (indicated
60 Normally, execution of configured tools will continue after a failure (indicated
61 by a non-zero exit status). It can also be configured to abort after the first
61 by a non-zero exit status). It can also be configured to abort after the first
62 such failure, so that no files will be affected if any tool fails. This abort
62 such failure, so that no files will be affected if any tool fails. This abort
63 will also cause :hg:`fix` to exit with a non-zero status::
63 will also cause :hg:`fix` to exit with a non-zero status::
64
64
65 [fix]
65 [fix]
66 failure = abort
66 failure = abort
67
67
68 When multiple tools are configured to affect a file, they execute in an order
68 When multiple tools are configured to affect a file, they execute in an order
69 defined by the :priority suboption. The priority suboption has a default value
69 defined by the :priority suboption. The priority suboption has a default value
70 of zero for each tool. Tools are executed in order of descending priority. The
70 of zero for each tool. Tools are executed in order of descending priority. The
71 execution order of tools with equal priority is unspecified. For example, you
71 execution order of tools with equal priority is unspecified. For example, you
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
73 in a text file by ensuring that 'sort' runs before 'head'::
73 in a text file by ensuring that 'sort' runs before 'head'::
74
74
75 [fix]
75 [fix]
76 sort:command = sort -n
76 sort:command = sort -n
77 head:command = head -n 10
77 head:command = head -n 10
78 sort:pattern = numbers.txt
78 sort:pattern = numbers.txt
79 head:pattern = numbers.txt
79 head:pattern = numbers.txt
80 sort:priority = 2
80 sort:priority = 2
81 head:priority = 1
81 head:priority = 1
82
82
83 To account for changes made by each tool, the line numbers used for incremental
83 To account for changes made by each tool, the line numbers used for incremental
84 formatting are recomputed before executing the next tool. So, each tool may see
84 formatting are recomputed before executing the next tool. So, each tool may see
85 different values for the arguments added by the :linerange suboption.
85 different values for the arguments added by the :linerange suboption.
86
86
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
88 content. The metadata must be placed before the file content on stdout,
88 content. The metadata must be placed before the file content on stdout,
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
91 is expected to produce this metadata encoding if and only if the :metadata
91 is expected to produce this metadata encoding if and only if the :metadata
92 suboption is true::
92 suboption is true::
93
93
94 [fix]
94 [fix]
95 tool:command = tool --prepend-json-metadata
95 tool:command = tool --prepend-json-metadata
96 tool:metadata = true
96 tool:metadata = true
97
97
98 The metadata values are passed to hooks, which can be used to print summaries or
98 The metadata values are passed to hooks, which can be used to print summaries or
99 perform other post-fixing work. The supported hooks are::
99 perform other post-fixing work. The supported hooks are::
100
100
101 "postfixfile"
101 "postfixfile"
102 Run once for each file in each revision where any fixer tools made changes
102 Run once for each file in each revision where any fixer tools made changes
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
105 tools that affected the file. Fixer tools that didn't affect the file have a
105 tools that affected the file. Fixer tools that didn't affect the file have a
106 value of None. Only fixer tools that executed are present in the metadata.
106 value of None. Only fixer tools that executed are present in the metadata.
107
107
108 "postfix"
108 "postfix"
109 Run once after all files and revisions have been handled. Provides
109 Run once after all files and revisions have been handled. Provides
110 "$HG_REPLACEMENTS" with information about what revisions were created and
110 "$HG_REPLACEMENTS" with information about what revisions were created and
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
112 files in the working copy were updated. Provides a list "$HG_METADATA"
112 files in the working copy were updated. Provides a list "$HG_METADATA"
113 mapping fixer tool names to lists of metadata values returned from
113 mapping fixer tool names to lists of metadata values returned from
114 executions that modified a file. This aggregates the same metadata
114 executions that modified a file. This aggregates the same metadata
115 previously passed to the "postfixfile" hook.
115 previously passed to the "postfixfile" hook.
116
116
117 Fixer tools are run in the repository's root directory. This allows them to read
117 Fixer tools are run in the repository's root directory. This allows them to read
118 configuration files from the working copy, or even write to the working copy.
118 configuration files from the working copy, or even write to the working copy.
119 The working copy is not updated to match the revision being fixed. In fact,
119 The working copy is not updated to match the revision being fixed. In fact,
120 several revisions may be fixed in parallel. Writes to the working copy are not
120 several revisions may be fixed in parallel. Writes to the working copy are not
121 amended into the revision being fixed; fixer tools should always write fixed
121 amended into the revision being fixed; fixer tools should always write fixed
122 file content back to stdout as documented above.
122 file content back to stdout as documented above.
123 """
123 """
124
124
125 from __future__ import absolute_import
125 from __future__ import absolute_import
126
126
127 import collections
127 import collections
128 import itertools
128 import itertools
129 import os
129 import os
130 import re
130 import re
131 import subprocess
131 import subprocess
132
132
133 from mercurial.i18n import _
133 from mercurial.i18n import _
134 from mercurial.node import nullrev
134 from mercurial.node import nullrev
135 from mercurial.node import wdirrev
135 from mercurial.node import wdirrev
136
136
137 from mercurial.utils import procutil
137 from mercurial.utils import procutil
138
138
139 from mercurial import (
139 from mercurial import (
140 cmdutil,
140 cmdutil,
141 context,
141 context,
142 copies,
142 copies,
143 error,
143 error,
144 match as matchmod,
144 match as matchmod,
145 mdiff,
145 mdiff,
146 merge,
146 merge,
147 pycompat,
147 pycompat,
148 registrar,
148 registrar,
149 rewriteutil,
149 rewriteutil,
150 scmutil,
150 scmutil,
151 util,
151 util,
152 worker,
152 worker,
153 )
153 )
154
154
155 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
155 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
156 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
156 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
157 # be specifying the version(s) of Mercurial they are tested with, or
157 # be specifying the version(s) of Mercurial they are tested with, or
158 # leave the attribute unspecified.
158 # leave the attribute unspecified.
159 testedwith = b'ships-with-hg-core'
159 testedwith = b'ships-with-hg-core'
160
160
161 cmdtable = {}
161 cmdtable = {}
162 command = registrar.command(cmdtable)
162 command = registrar.command(cmdtable)
163
163
164 configtable = {}
164 configtable = {}
165 configitem = registrar.configitem(configtable)
165 configitem = registrar.configitem(configtable)
166
166
167 # Register the suboptions allowed for each configured fixer, and default values.
167 # Register the suboptions allowed for each configured fixer, and default values.
168 FIXER_ATTRS = {
168 FIXER_ATTRS = {
169 b'command': None,
169 b'command': None,
170 b'linerange': None,
170 b'linerange': None,
171 b'pattern': None,
171 b'pattern': None,
172 b'priority': 0,
172 b'priority': 0,
173 b'metadata': False,
173 b'metadata': False,
174 b'skipclean': True,
174 b'skipclean': True,
175 b'enabled': True,
175 b'enabled': True,
176 }
176 }
177
177
178 for key, default in FIXER_ATTRS.items():
178 for key, default in FIXER_ATTRS.items():
179 configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)
179 configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)
180
180
181 # A good default size allows most source code files to be fixed, but avoids
181 # A good default size allows most source code files to be fixed, but avoids
182 # letting fixer tools choke on huge inputs, which could be surprising to the
182 # letting fixer tools choke on huge inputs, which could be surprising to the
183 # user.
183 # user.
184 configitem(b'fix', b'maxfilesize', default=b'2MB')
184 configitem(b'fix', b'maxfilesize', default=b'2MB')
185
185
186 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
186 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
187 # This helps users do shell scripts that stop when a fixer tool signals a
187 # This helps users do shell scripts that stop when a fixer tool signals a
188 # problem.
188 # problem.
189 configitem(b'fix', b'failure', default=b'continue')
189 configitem(b'fix', b'failure', default=b'continue')
190
190
191
191
192 def checktoolfailureaction(ui, message, hint=None):
192 def checktoolfailureaction(ui, message, hint=None):
193 """Abort with 'message' if fix.failure=abort"""
193 """Abort with 'message' if fix.failure=abort"""
194 action = ui.config(b'fix', b'failure')
194 action = ui.config(b'fix', b'failure')
195 if action not in (b'continue', b'abort'):
195 if action not in (b'continue', b'abort'):
196 raise error.Abort(
196 raise error.Abort(
197 _(b'unknown fix.failure action: %s') % (action,),
197 _(b'unknown fix.failure action: %s') % (action,),
198 hint=_(b'use "continue" or "abort"'),
198 hint=_(b'use "continue" or "abort"'),
199 )
199 )
200 if action == b'abort':
200 if action == b'abort':
201 raise error.Abort(message, hint=hint)
201 raise error.Abort(message, hint=hint)
202
202
203
203
204 allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
204 allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
205 baseopt = (
205 baseopt = (
206 b'',
206 b'',
207 b'base',
207 b'base',
208 [],
208 [],
209 _(
209 _(
210 b'revisions to diff against (overrides automatic '
210 b'revisions to diff against (overrides automatic '
211 b'selection, and applies to every revision being '
211 b'selection, and applies to every revision being '
212 b'fixed)'
212 b'fixed)'
213 ),
213 ),
214 _(b'REV'),
214 _(b'REV'),
215 )
215 )
216 revopt = (b'r', b'rev', [], _(b'revisions to fix'), _(b'REV'))
216 revopt = (b'r', b'rev', [], _(b'revisions to fix'), _(b'REV'))
217 wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
217 wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
218 wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
218 wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
219 usage = _(b'[OPTION]... [FILE]...')
219 usage = _(b'[OPTION]... [FILE]...')
220
220
221
221
222 @command(
222 @command(
223 b'fix',
223 b'fix',
224 [allopt, baseopt, revopt, wdiropt, wholeopt],
224 [allopt, baseopt, revopt, wdiropt, wholeopt],
225 usage,
225 usage,
226 helpcategory=command.CATEGORY_FILE_CONTENTS,
226 helpcategory=command.CATEGORY_FILE_CONTENTS,
227 )
227 )
228 def fix(ui, repo, *pats, **opts):
228 def fix(ui, repo, *pats, **opts):
229 """rewrite file content in changesets or working directory
229 """rewrite file content in changesets or working directory
230
230
231 Runs any configured tools to fix the content of files. Only affects files
231 Runs any configured tools to fix the content of files. Only affects files
232 with changes, unless file arguments are provided. Only affects changed lines
232 with changes, unless file arguments are provided. Only affects changed lines
233 of files, unless the --whole flag is used. Some tools may always affect the
233 of files, unless the --whole flag is used. Some tools may always affect the
234 whole file regardless of --whole.
234 whole file regardless of --whole.
235
235
236 If revisions are specified with --rev, those revisions will be checked, and
236 If revisions are specified with --rev, those revisions will be checked, and
237 they may be replaced with new revisions that have fixed file content. It is
237 they may be replaced with new revisions that have fixed file content. It is
238 desirable to specify all descendants of each specified revision, so that the
238 desirable to specify all descendants of each specified revision, so that the
239 fixes propagate to the descendants. If all descendants are fixed at the same
239 fixes propagate to the descendants. If all descendants are fixed at the same
240 time, no merging, rebasing, or evolution will be required.
240 time, no merging, rebasing, or evolution will be required.
241
241
242 If --working-dir is used, files with uncommitted changes in the working copy
242 If --working-dir is used, files with uncommitted changes in the working copy
243 will be fixed. If the checked-out revision is also fixed, the working
243 will be fixed. If the checked-out revision is also fixed, the working
244 directory will update to the replacement revision.
244 directory will update to the replacement revision.
245
245
246 When determining what lines of each file to fix at each revision, the whole
246 When determining what lines of each file to fix at each revision, the whole
247 set of revisions being fixed is considered, so that fixes to earlier
247 set of revisions being fixed is considered, so that fixes to earlier
248 revisions are not forgotten in later ones. The --base flag can be used to
248 revisions are not forgotten in later ones. The --base flag can be used to
249 override this default behavior, though it is not usually desirable to do so.
249 override this default behavior, though it is not usually desirable to do so.
250 """
250 """
251 opts = pycompat.byteskwargs(opts)
251 opts = pycompat.byteskwargs(opts)
252 cmdutil.check_at_most_one_arg(opts, b'all', b'rev')
252 cmdutil.check_at_most_one_arg(opts, b'all', b'rev')
253 cmdutil.check_incompatible_arguments(opts, b'working_dir', [b'all'])
253 cmdutil.check_incompatible_arguments(opts, b'working_dir', [b'all'])
254 if opts[b'all']:
254
255 opts[b'rev'] = [b'not public() and not obsolete()']
256 opts[b'working_dir'] = True
257 with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
255 with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
258 revstofix = getrevstofix(ui, repo, opts)
256 revstofix = getrevstofix(ui, repo, opts)
259 basectxs = getbasectxs(repo, opts, revstofix)
257 basectxs = getbasectxs(repo, opts, revstofix)
260 workqueue, numitems = getworkqueue(
258 workqueue, numitems = getworkqueue(
261 ui, repo, pats, opts, revstofix, basectxs
259 ui, repo, pats, opts, revstofix, basectxs
262 )
260 )
263 fixers = getfixers(ui)
261 fixers = getfixers(ui)
264
262
265 # There are no data dependencies between the workers fixing each file
263 # There are no data dependencies between the workers fixing each file
266 # revision, so we can use all available parallelism.
264 # revision, so we can use all available parallelism.
267 def getfixes(items):
265 def getfixes(items):
268 for rev, path in items:
266 for rev, path in items:
269 ctx = repo[rev]
267 ctx = repo[rev]
270 olddata = ctx[path].data()
268 olddata = ctx[path].data()
271 metadata, newdata = fixfile(
269 metadata, newdata = fixfile(
272 ui, repo, opts, fixers, ctx, path, basectxs[rev]
270 ui, repo, opts, fixers, ctx, path, basectxs[rev]
273 )
271 )
274 # Don't waste memory/time passing unchanged content back, but
272 # Don't waste memory/time passing unchanged content back, but
275 # produce one result per item either way.
273 # produce one result per item either way.
276 yield (
274 yield (
277 rev,
275 rev,
278 path,
276 path,
279 metadata,
277 metadata,
280 newdata if newdata != olddata else None,
278 newdata if newdata != olddata else None,
281 )
279 )
282
280
283 results = worker.worker(
281 results = worker.worker(
284 ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
282 ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
285 )
283 )
286
284
287 # We have to hold on to the data for each successor revision in memory
285 # We have to hold on to the data for each successor revision in memory
288 # until all its parents are committed. We ensure this by committing and
286 # until all its parents are committed. We ensure this by committing and
289 # freeing memory for the revisions in some topological order. This
287 # freeing memory for the revisions in some topological order. This
290 # leaves a little bit of memory efficiency on the table, but also makes
288 # leaves a little bit of memory efficiency on the table, but also makes
291 # the tests deterministic. It might also be considered a feature since
289 # the tests deterministic. It might also be considered a feature since
292 # it makes the results more easily reproducible.
290 # it makes the results more easily reproducible.
293 filedata = collections.defaultdict(dict)
291 filedata = collections.defaultdict(dict)
294 aggregatemetadata = collections.defaultdict(list)
292 aggregatemetadata = collections.defaultdict(list)
295 replacements = {}
293 replacements = {}
296 wdirwritten = False
294 wdirwritten = False
297 commitorder = sorted(revstofix, reverse=True)
295 commitorder = sorted(revstofix, reverse=True)
298 with ui.makeprogress(
296 with ui.makeprogress(
299 topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
297 topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
300 ) as progress:
298 ) as progress:
301 for rev, path, filerevmetadata, newdata in results:
299 for rev, path, filerevmetadata, newdata in results:
302 progress.increment(item=path)
300 progress.increment(item=path)
303 for fixername, fixermetadata in filerevmetadata.items():
301 for fixername, fixermetadata in filerevmetadata.items():
304 aggregatemetadata[fixername].append(fixermetadata)
302 aggregatemetadata[fixername].append(fixermetadata)
305 if newdata is not None:
303 if newdata is not None:
306 filedata[rev][path] = newdata
304 filedata[rev][path] = newdata
307 hookargs = {
305 hookargs = {
308 b'rev': rev,
306 b'rev': rev,
309 b'path': path,
307 b'path': path,
310 b'metadata': filerevmetadata,
308 b'metadata': filerevmetadata,
311 }
309 }
312 repo.hook(
310 repo.hook(
313 b'postfixfile',
311 b'postfixfile',
314 throw=False,
312 throw=False,
315 **pycompat.strkwargs(hookargs)
313 **pycompat.strkwargs(hookargs)
316 )
314 )
317 numitems[rev] -= 1
315 numitems[rev] -= 1
318 # Apply the fixes for this and any other revisions that are
316 # Apply the fixes for this and any other revisions that are
319 # ready and sitting at the front of the queue. Using a loop here
317 # ready and sitting at the front of the queue. Using a loop here
320 # prevents the queue from being blocked by the first revision to
318 # prevents the queue from being blocked by the first revision to
321 # be ready out of order.
319 # be ready out of order.
322 while commitorder and not numitems[commitorder[-1]]:
320 while commitorder and not numitems[commitorder[-1]]:
323 rev = commitorder.pop()
321 rev = commitorder.pop()
324 ctx = repo[rev]
322 ctx = repo[rev]
325 if rev == wdirrev:
323 if rev == wdirrev:
326 writeworkingdir(repo, ctx, filedata[rev], replacements)
324 writeworkingdir(repo, ctx, filedata[rev], replacements)
327 wdirwritten = bool(filedata[rev])
325 wdirwritten = bool(filedata[rev])
328 else:
326 else:
329 replacerev(ui, repo, ctx, filedata[rev], replacements)
327 replacerev(ui, repo, ctx, filedata[rev], replacements)
330 del filedata[rev]
328 del filedata[rev]
331
329
332 cleanup(repo, replacements, wdirwritten)
330 cleanup(repo, replacements, wdirwritten)
333 hookargs = {
331 hookargs = {
334 b'replacements': replacements,
332 b'replacements': replacements,
335 b'wdirwritten': wdirwritten,
333 b'wdirwritten': wdirwritten,
336 b'metadata': aggregatemetadata,
334 b'metadata': aggregatemetadata,
337 }
335 }
338 repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
336 repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
339
337
340
338
341 def cleanup(repo, replacements, wdirwritten):
339 def cleanup(repo, replacements, wdirwritten):
342 """Calls scmutil.cleanupnodes() with the given replacements.
340 """Calls scmutil.cleanupnodes() with the given replacements.
343
341
344 "replacements" is a dict from nodeid to nodeid, with one key and one value
342 "replacements" is a dict from nodeid to nodeid, with one key and one value
345 for every revision that was affected by fixing. This is slightly different
343 for every revision that was affected by fixing. This is slightly different
346 from cleanupnodes().
344 from cleanupnodes().
347
345
348 "wdirwritten" is a bool which tells whether the working copy was affected by
346 "wdirwritten" is a bool which tells whether the working copy was affected by
349 fixing, since it has no entry in "replacements".
347 fixing, since it has no entry in "replacements".
350
348
351 Useful as a hook point for extending "hg fix" with output summarizing the
349 Useful as a hook point for extending "hg fix" with output summarizing the
352 effects of the command, though we choose not to output anything here.
350 effects of the command, though we choose not to output anything here.
353 """
351 """
354 replacements = {
352 replacements = {
355 prec: [succ] for prec, succ in pycompat.iteritems(replacements)
353 prec: [succ] for prec, succ in pycompat.iteritems(replacements)
356 }
354 }
357 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
355 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
358
356
359
357
360 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
358 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
361 """"Constructs the list of files to be fixed at specific revisions
359 """"Constructs the list of files to be fixed at specific revisions
362
360
363 It is up to the caller how to consume the work items, and the only
361 It is up to the caller how to consume the work items, and the only
364 dependence between them is that replacement revisions must be committed in
362 dependence between them is that replacement revisions must be committed in
365 topological order. Each work item represents a file in the working copy or
363 topological order. Each work item represents a file in the working copy or
366 in some revision that should be fixed and written back to the working copy
364 in some revision that should be fixed and written back to the working copy
367 or into a replacement revision.
365 or into a replacement revision.
368
366
369 Work items for the same revision are grouped together, so that a worker
367 Work items for the same revision are grouped together, so that a worker
370 pool starting with the first N items in parallel is likely to finish the
368 pool starting with the first N items in parallel is likely to finish the
371 first revision's work before other revisions. This can allow us to write
369 first revision's work before other revisions. This can allow us to write
372 the result to disk and reduce memory footprint. At time of writing, the
370 the result to disk and reduce memory footprint. At time of writing, the
373 partition strategy in worker.py seems favorable to this. We also sort the
371 partition strategy in worker.py seems favorable to this. We also sort the
374 items by ascending revision number to match the order in which we commit
372 items by ascending revision number to match the order in which we commit
375 the fixes later.
373 the fixes later.
376 """
374 """
377 workqueue = []
375 workqueue = []
378 numitems = collections.defaultdict(int)
376 numitems = collections.defaultdict(int)
379 maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
377 maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
380 for rev in sorted(revstofix):
378 for rev in sorted(revstofix):
381 fixctx = repo[rev]
379 fixctx = repo[rev]
382 match = scmutil.match(fixctx, pats, opts)
380 match = scmutil.match(fixctx, pats, opts)
383 for path in sorted(
381 for path in sorted(
384 pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
382 pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
385 ):
383 ):
386 fctx = fixctx[path]
384 fctx = fixctx[path]
387 if fctx.islink():
385 if fctx.islink():
388 continue
386 continue
389 if fctx.size() > maxfilesize:
387 if fctx.size() > maxfilesize:
390 ui.warn(
388 ui.warn(
391 _(b'ignoring file larger than %s: %s\n')
389 _(b'ignoring file larger than %s: %s\n')
392 % (util.bytecount(maxfilesize), path)
390 % (util.bytecount(maxfilesize), path)
393 )
391 )
394 continue
392 continue
395 workqueue.append((rev, path))
393 workqueue.append((rev, path))
396 numitems[rev] += 1
394 numitems[rev] += 1
397 return workqueue, numitems
395 return workqueue, numitems
398
396
399
397
def getrevstofix(ui, repo, opts):
    """Returns the set of revision numbers that should be fixed"""
    if opts[b'all']:
        # --all selects every mutable (non-public, non-obsolete) revision,
        # plus the working copy.
        revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
    else:
        revs = set(scmutil.revrange(repo, opts[b'rev']))
        if opts.get(b'working_dir'):
            revs.add(wdirrev)
    for rev in revs:
        checkfixablectx(ui, repo, repo[rev])
    # Allow fixing only wdir() even if there's an unfinished operation
    if not (len(revs) == 1 and wdirrev in revs):
        cmdutil.checkunfinished(repo)
    rewriteutil.precheck(repo, revs, b'fix')
    if wdirrev in revs and list(merge.mergestate.read(repo).unresolved()):
        raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
    if not revs:
        raise error.Abort(
            b'no changesets specified', hint=b'use --rev or --working-dir'
        )
    return revs
418
419
419
420
def checkfixablectx(ui, repo, ctx):
    """Aborts if the revision shouldn't be replaced with a fixed one."""
    if not ctx.obsolete():
        return
    # It would be better to actually check if the revision has a successor.
    allowdivergence = ui.configbool(
        b'experimental', b'evolution.allowdivergence'
    )
    if not allowdivergence:
        raise error.Abort(b'fixing obsolete revision could cause divergence')
431
432
432
433
def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed.
    """
    paths = set()
    for basectx in basectxs:
        # When patterns are given, also consider clean and unknown files so
        # that explicitly named files are always fixed.
        stat = basectx.status(
            fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
        )
        for group in (stat.added, stat.modified, stat.clean, stat.unknown):
            paths.update(group)
    return paths
453
454
454
455
def lineranges(opts, path, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that have
    changed versus any of the base contexts, and whether the file has been
    renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get(b'whole'):
        # Return a range containing all lines. Rely on the diff
        # implementation's idea of how many lines are in the file, instead of
        # reimplementing it.
        return difflineranges(b'', content2)

    rangeslist = []
    for basectx in basectxs:
        # Follow renames so we diff against the right file in the base.
        basepath = copies.pathcopies(basectx, fixctx).get(path, path)
        content1 = basectx[basepath].data() if basepath in basectx else b''
        rangeslist.extend(difflineranges(content1, content2))
    return unionranges(rangeslist)
481
482
482
483
def unionranges(rangeslist):
    """Return the union of some closed intervals

    Adjacent or overlapping intervals are merged; the result is sorted.

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    merged = []
    for start, end in sorted(set(rangeslist)):
        if merged and start <= merged[-1][1] + 1:
            # Overlaps or abuts the previous interval: extend it.
            prevstart, prevend = merged[-1]
            merged[-1] = (prevstart, max(end, prevend))
        else:
            merged.append((start, end))
    return merged
516
517
517
518
def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    # Only changed ('!') blocks with a non-empty span on the content2 side
    # produce a range; block[2:4] are content2's (first, last) line indexes.
    return [
        (block[2] + 1, block[3])
        for block, kind in mdiff.allblocks(content1, content2)
        if kind == b'!' and block[2] != block[3]
    ]
563
564
564
565
def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(scmutil.revrange(repo, opts.get(b'base'))) or {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        for pctx in repo[rev].parents():
            prev = pctx.rev()
            if prev in basectxs:
                # The parent is being fixed too, so inherit its bases.
                basectxs[rev].update(basectxs[prev])
            else:
                basectxs[rev].add(pctx)
    return basectxs
593
594
594
595
def fixfile(ui, repo, opts, fixers, fixctx, path, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some
    order starting with the file's content in the fixctx. Fixers that support
    line ranges will affect lines that have changed relative to any of the
    basectxs (i.e. they will only avoid lines that are common to all basectxs).

    A fixer tool's stdout will become the file's new content if and only if it
    exits with code zero. The fixer tool's working directory is the
    repository's root.
    """
    metadata = {}
    newdata = fixctx[path].data()
    for fixername, fixer in pycompat.iteritems(fixers):
        if not fixer.affects(opts, fixctx, path):
            continue
        ranges = lineranges(opts, path, basectxs, fixctx, newdata)
        command = fixer.command(ui, path, ranges)
        if command is None:
            # Nothing to run for these ranges (e.g. skipclean with no diff).
            continue
        ui.debug(b'subprocess: %s\n' % (command,))
        proc = subprocess.Popen(
            procutil.tonativestr(command),
            shell=True,
            cwd=procutil.tonativestr(repo.root),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # The file content is fed on stdin; the fixed content comes on stdout.
        stdout, stderr = proc.communicate(newdata)
        if stderr:
            showstderr(ui, fixctx.rev(), fixername, stderr)
        newerdata = stdout
        if fixer.shouldoutputmetadata():
            # The tool's stdout is expected to be JSON metadata, a null byte,
            # then the fixed file content.
            try:
                metadatajson, newerdata = stdout.split(b'\0', 1)
                metadata[fixername] = pycompat.json_loads(metadatajson)
            except ValueError:
                ui.warn(
                    _(b'ignored invalid output from fixer tool: %s\n')
                    % (fixername,)
                )
                continue
        else:
            metadata[fixername] = None
        if proc.returncode == 0:
            newdata = newerdata
        else:
            if not stderr:
                message = _(b'exited with status %d\n') % (proc.returncode,)
                showstderr(ui, fixctx.rev(), fixername, message)
            checktoolfailureaction(
                ui,
                _(b'no fixes will be applied'),
                hint=_(
                    b'use --config fix.failure=continue to apply any '
                    b'successful fixes anyway'
                ),
            )
    return metadata, newdata
655
656
656
657
def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each line of
    the error message. Doesn't include file names, since those take up a lot of
    space and would tend to be included in the error message if they were
    relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if not line:
            continue
        ui.warn(b'[')
        if rev is None:
            ui.warn(_(b'wdir'), label=b'evolve.rev')
        else:
            ui.warn(b'%d' % rev, label=b'evolve.rev')
        ui.warn(b'] %s: %s\n' % (fixername, line))
673
674
674
675
def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both the
    working directory and its parent revision. This avoids the need for a full
    update/merge, and means that the working directory simply isn't affected
    unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
    for path, data in pycompat.iteritems(filedata):
        fctx = ctx[path]
        fctx.write(data, fctx.flags())
        if repo.dirstate[path] == b'n':
            # Force a size/mtime recheck so the rewritten file isn't
            # mistakenly reported clean.
            repo.dirstate.normallookup(path)

    oldparentnodes = repo.dirstate.parents()
    newparentnodes = [replacements.get(node, node) for node in oldparentnodes]
    if newparentnodes != oldparentnodes:
        repo.setparents(*newparentnodes)
695
696
696
697
def replacerev(ui, repo, ctx, filedata, replacements):
    """Commit a new revision like the given one, but with file content changes

    "ctx" is the original revision to be replaced by a modified one.

    "filedata" is a dict that maps paths to their new file content. All other
    paths will be recreated from the original revision without changes.
    "filedata" may contain paths that didn't exist in the original revision;
    they will be added.

    "replacements" is a dict that maps a single node to a single node, and it
    is updated to indicate the original revision is replaced by the newly
    created one. No entry is added if the replacement's node already exists.

    The new revision has the same parents as the old one, unless those parents
    have already been replaced, in which case those replacements are the
    parents of this new revision. Thus, if revisions are replaced in
    topological order, there is no need to rebase them into the original
    topology later.
    """
    p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
    p1ctx, p2ctx = repo[p1rev], repo[p2rev]
    newp1node = replacements.get(p1ctx.node(), p1ctx.node())
    newp2node = replacements.get(p2ctx.node(), p2ctx.node())

    # We don't want to create a revision that has no changes from the original,
    # but we should if the original revision's parent has been replaced.
    # Otherwise, we would produce an orphan that needs no actual human
    # intervention to evolve. We can't rely on commit() to avoid creating the
    # un-needed revision because the extra field added below produces a new
    # hash regardless of file content changes.
    if (
        not filedata
        and p1ctx.node() not in replacements
        and p2ctx.node() not in replacements
    ):
        return

    extra = ctx.extra().copy()
    extra[b'fix_source'] = ctx.hex()

    # Build the replacement on an overlay working context based on the
    # (possibly already replaced) first parent.
    wctx = context.overlayworkingctx(repo)
    wctx.setbase(repo[newp1node])
    merge.revert_to(ctx, wc=wctx)
    copies.graftcopies(wctx, ctx, ctx.p1())

    for path, data in filedata.items():
        fctx = ctx[path]
        copysource = fctx.copysource()
        wctx.write(path, data, flags=fctx.flags())
        if copysource:
            wctx.markcopied(path, copysource)

    memctx = wctx.tomemctx(
        text=ctx.description(),
        branch=ctx.branch(),
        extra=extra,
        date=ctx.date(),
        parents=(newp1node, newp2node),
        user=ctx.user(),
    )

    sucnode = memctx.commit()
    if ctx.node() == sucnode:
        ui.debug(b'node %s already existed\n' % (ctx.hex()))
    else:
        replacements[ctx.node()] = sucnode
765
766
766
767
def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior of
    the fixer's config suboptions. Does not validate the config values.
    """
    fixers = {}
    for name in fixernames(ui):
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to let
        # it affect no files. There is no reasonable subset of files to use as
        # the default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
        elif pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
        elif not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
        else:
            fixers[name] = Fixer(
                command, pattern, linerange, priority, metadata, skipclean
            )
    # Highest-priority fixers run first.
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )
803
804
804
805
def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    # A fixer is defined by config keys of the form "<name>:<suboption>".
    return {
        key.split(b':', 1)[0]
        for key, value in ui.configitems(b'fix')
        if b':' in key
    }
812
813
813
814
class Fixer(object):
    """Wraps the raw config values for a fixer with methods"""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Should this fixer run on the file at the given path and context?"""
        repo = fixctx.repo()
        matcher = matchmod.match(
            repo.root, repo.root, [self._pattern], ctx=fixctx
        )
        return matcher(path)

    def shouldoutputmetadata(self):
        """Should the stdout of this fixer start with JSON and a null byte?"""
        return self._metadata

    def command(self, ui, path, ranges):
        """A shell command to use to invoke this fixer on the given file/lines

        May return None if there is no appropriate command to run for the given
        parameters.
        """
        expand = cmdutil.rendercommandtemplate
        parts = [
            expand(
                ui,
                self._command,
                {b'rootpath': path, b'basename': os.path.basename(path)},
            )
        ]
        if self._linerange:
            if self._skipclean and not ranges:
                # No line ranges to fix, so don't run the fixer.
                return None
            parts.extend(
                expand(ui, self._linerange, {b'first': first, b'last': last})
                for first, last in ranges
            )
        return b' '.join(parts)
General Comments 0
You need to be logged in to leave comments. Login now