fix: use cmdutil.check_at_most_one_arg()...
Martin von Zweigbergk
r44349:dda49ec2 default
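
The changeset below replaces fix()'s open-coded mutual-exclusion check for --rev and --all with a call to cmdutil.check_at_most_one_arg(). As a rough illustration of the pattern, here is a minimal sketch of the kind of helper being called, assuming it simply aborts when more than one of the named options is set; it is not copied from Mercurial's cmdutil.

# Hedged sketch: the helper name and the b'all'/b'rev' options come from the
# diff below; the body is an assumption about its behavior, not hg's source.
from mercurial import error
from mercurial.i18n import _


def check_at_most_one_arg(opts, *args):
    """Abort if more than one of the named options is set; return the one set."""
    previous = None
    for name in args:
        if opts.get(name):
            if previous:
                raise error.Abort(
                    _(b'cannot specify both --%s and --%s') % (previous, name)
                )
            previous = name
    return previous


# Usage mirroring the new call site in fix():
#     cmdutil.check_at_most_one_arg(opts, b'all', b'rev')

With such a helper available, fix() no longer needs its own nested if/raise for the --rev/--all conflict, which is exactly what the hunk below removes.
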
@@ -1,882 +1,881 @@
# fix - rewrite file content in changesets and working copy
#
# Copyright 2018 Google LLC.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""rewrite file content in changesets or working copy (EXPERIMENTAL)

Provides a command that runs configured tools on the contents of modified files,
writing back any fixes to the working copy or replacing changesets.

Here is an example configuration that causes :hg:`fix` to apply automatic
formatting fixes to modified lines in C++ code::

  [fix]
  clang-format:command=clang-format --assume-filename={rootpath}
  clang-format:linerange=--lines={first}:{last}
  clang-format:pattern=set:**.cpp or **.hpp

The :command suboption forms the first part of the shell command that will be
used to fix a file. The content of the file is passed on standard input, and the
fixed file content is expected on standard output. Any output on standard error
will be displayed as a warning. If the exit status is not zero, the file will
not be affected. A placeholder warning is displayed if there is a non-zero exit
status but no standard error output. Some values may be substituted into the
command::

  {rootpath} The path of the file being fixed, relative to the repo root
  {basename} The name of the file being fixed, without the directory path

If the :linerange suboption is set, the tool will only be run if there are
changed lines in a file. The value of this suboption is appended to the shell
command once for every range of changed lines in the file. Some values may be
substituted into the command::

  {first} The 1-based line number of the first line in the modified range
  {last} The 1-based line number of the last line in the modified range

Deleted sections of a file will be ignored by :linerange, because there is no
corresponding line range in the version being fixed.

By default, tools that set :linerange will only be executed if there is at least
one changed line range. This is meant to prevent accidents like running a code
formatter in such a way that it unexpectedly reformats the whole file. If such a
tool needs to operate on unchanged files, it should set the :skipclean suboption
to false.

The :pattern suboption determines which files will be passed through each
configured tool. See :hg:`help patterns` for possible values. However, all
patterns are relative to the repo root, even if that text says they are relative
to the current working directory. If there are file arguments to :hg:`fix`, the
intersection of these patterns is used.

There is also a configurable limit for the maximum size of file that will be
processed by :hg:`fix`::

  [fix]
  maxfilesize = 2MB

Normally, execution of configured tools will continue after a failure (indicated
by a non-zero exit status). It can also be configured to abort after the first
such failure, so that no files will be affected if any tool fails. This abort
will also cause :hg:`fix` to exit with a non-zero status::

  [fix]
  failure = abort

When multiple tools are configured to affect a file, they execute in an order
defined by the :priority suboption. The priority suboption has a default value
of zero for each tool. Tools are executed in order of descending priority. The
execution order of tools with equal priority is unspecified. For example, you
could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
in a text file by ensuring that 'sort' runs before 'head'::

  [fix]
  sort:command = sort -n
  head:command = head -n 10
  sort:pattern = numbers.txt
  head:pattern = numbers.txt
  sort:priority = 2
  head:priority = 1

To account for changes made by each tool, the line numbers used for incremental
formatting are recomputed before executing the next tool. So, each tool may see
different values for the arguments added by the :linerange suboption.

Each fixer tool is allowed to return some metadata in addition to the fixed file
content. The metadata must be placed before the file content on stdout,
separated from the file content by a zero byte. The metadata is parsed as a JSON
value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
is expected to produce this metadata encoding if and only if the :metadata
suboption is true::

  [fix]
  tool:command = tool --prepend-json-metadata
  tool:metadata = true

The metadata values are passed to hooks, which can be used to print summaries or
perform other post-fixing work. The supported hooks are::

  "postfixfile"
    Run once for each file in each revision where any fixer tools made changes
    to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
    and "$HG_METADATA" with a map of fixer names to metadata values from fixer
    tools that affected the file. Fixer tools that didn't affect the file have a
    value of None. Only fixer tools that executed are present in the metadata.

  "postfix"
    Run once after all files and revisions have been handled. Provides
    "$HG_REPLACEMENTS" with information about what revisions were created and
    made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
    files in the working copy were updated. Provides a list "$HG_METADATA"
    mapping fixer tool names to lists of metadata values returned from
    executions that modified a file. This aggregates the same metadata
    previously passed to the "postfixfile" hook.

Fixer tools are run in the repository's root directory. This allows them to read
configuration files from the working copy, or even write to the working copy.
The working copy is not updated to match the revision being fixed. In fact,
several revisions may be fixed in parallel. Writes to the working copy are not
amended into the revision being fixed; fixer tools should always write fixed
file content back to stdout as documented above.
"""

from __future__ import absolute_import

import collections
import itertools
import os
import re
import subprocess

from mercurial.i18n import _
from mercurial.node import nullrev
from mercurial.node import wdirrev

from mercurial.utils import procutil

from mercurial import (
    cmdutil,
    context,
    copies,
    error,
    match as matchmod,
    mdiff,
    merge,
    obsolete,
    pycompat,
    registrar,
    scmutil,
    util,
    worker,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Register the suboptions allowed for each configured fixer, and default values.
FIXER_ATTRS = {
    b'command': None,
    b'linerange': None,
    b'pattern': None,
    b'priority': 0,
    b'metadata': False,
    b'skipclean': True,
    b'enabled': True,
}

for key, default in FIXER_ATTRS.items():
    configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)

# A good default size allows most source code files to be fixed, but avoids
# letting fixer tools choke on huge inputs, which could be surprising to the
# user.
configitem(b'fix', b'maxfilesize', default=b'2MB')

# Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
# This helps users do shell scripts that stop when a fixer tool signals a
# problem.
configitem(b'fix', b'failure', default=b'continue')


def checktoolfailureaction(ui, message, hint=None):
    """Abort with 'message' if fix.failure=abort"""
    action = ui.config(b'fix', b'failure')
    if action not in (b'continue', b'abort'):
        raise error.Abort(
            _(b'unknown fix.failure action: %s') % (action,),
            hint=_(b'use "continue" or "abort"'),
        )
    if action == b'abort':
        raise error.Abort(message, hint=hint)


allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
baseopt = (
    b'',
    b'base',
    [],
    _(
        b'revisions to diff against (overrides automatic '
        b'selection, and applies to every revision being '
        b'fixed)'
    ),
    _(b'REV'),
)
revopt = (b'r', b'rev', [], _(b'revisions to fix'), _(b'REV'))
wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
usage = _(b'[OPTION]... [FILE]...')


@command(
    b'fix',
    [allopt, baseopt, revopt, wdiropt, wholeopt],
    usage,
    helpcategory=command.CATEGORY_FILE_CONTENTS,
)
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If revisions are specified with --rev, those revisions will be checked, and
    they may be replaced with new revisions that have fixed file content. It is
    desirable to specify all descendants of each specified revision, so that the
    fixes propagate to the descendants. If all descendants are fixed at the same
    time, no merging, rebasing, or evolution will be required.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. If the checked-out revision is also fixed, the working
    directory will update to the replacement revision.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
+    cmdutil.check_at_most_one_arg(opts, b'all', b'rev')
    if opts[b'all']:
-        if opts[b'rev']:
-            raise error.Abort(_(b'cannot specify both "--rev" and "--all"'))
        opts[b'rev'] = [b'not public() and not obsolete()']
        opts[b'working_dir'] = True
    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(
            ui, repo, pats, opts, revstofix, basectxs
        )
        fixers = getfixers(ui)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                metadata, newdata = fixfile(
                    ui, repo, opts, fixers, ctx, path, basectxs[rev]
                )
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (
                    rev,
                    path,
                    metadata,
                    newdata if newdata != olddata else None,
                )

        results = worker.worker(
            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
        )

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        aggregatemetadata = collections.defaultdict(list)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(
            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
        ) as progress:
            for rev, path, filerevmetadata, newdata in results:
                progress.increment(item=path)
                for fixername, fixermetadata in filerevmetadata.items():
                    aggregatemetadata[fixername].append(fixermetadata)
                if newdata is not None:
                    filedata[rev][path] = newdata
                    hookargs = {
                        b'rev': rev,
                        b'path': path,
                        b'metadata': filerevmetadata,
                    }
                    repo.hook(
                        b'postfixfile',
                        throw=False,
                        **pycompat.strkwargs(hookargs)
                    )
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
        hookargs = {
            b'replacements': replacements,
            b'wdirwritten': wdirwritten,
            b'metadata': aggregatemetadata,
        }
        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))


def cleanup(repo, replacements, wdirwritten):
    """Calls scmutil.cleanupnodes() with the given replacements.

    "replacements" is a dict from nodeid to nodeid, with one key and one value
    for every revision that was affected by fixing. This is slightly different
    from cleanupnodes().

    "wdirwritten" is a bool which tells whether the working copy was affected by
    fixing, since it has no entry in "replacements".

    Useful as a hook point for extending "hg fix" with output summarizing the
    effects of the command, though we choose not to output anything here.
    """
    replacements = {
        prec: [succ] for prec, succ in pycompat.iteritems(replacements)
    }
    scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)


def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
    """Constructs the list of files to be fixed at specific revisions

    It is up to the caller how to consume the work items, and the only
    dependence between them is that replacement revisions must be committed in
    topological order. Each work item represents a file in the working copy or
    in some revision that should be fixed and written back to the working copy
    or into a replacement revision.

    Work items for the same revision are grouped together, so that a worker
    pool starting with the first N items in parallel is likely to finish the
    first revision's work before other revisions. This can allow us to write
    the result to disk and reduce memory footprint. At time of writing, the
    partition strategy in worker.py seems favorable to this. We also sort the
    items by ascending revision number to match the order in which we commit
    the fixes later.
    """
    workqueue = []
    numitems = collections.defaultdict(int)
    maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
    for rev in sorted(revstofix):
        fixctx = repo[rev]
        match = scmutil.match(fixctx, pats, opts)
        for path in sorted(
            pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
        ):
            fctx = fixctx[path]
            if fctx.islink():
                continue
            if fctx.size() > maxfilesize:
                ui.warn(
                    _(b'ignoring file larger than %s: %s\n')
                    % (util.bytecount(maxfilesize), path)
                )
                continue
            workqueue.append((rev, path))
            numitems[rev] += 1
    return workqueue, numitems


def getrevstofix(ui, repo, opts):
    """Returns the set of revision numbers that should be fixed"""
    revs = set(scmutil.revrange(repo, opts[b'rev']))
    for rev in revs:
        checkfixablectx(ui, repo, repo[rev])
    if revs:
        cmdutil.checkunfinished(repo)
        checknodescendants(repo, revs)
    if opts.get(b'working_dir'):
        revs.add(wdirrev)
        if list(merge.mergestate.read(repo).unresolved()):
            raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
    if not revs:
        raise error.Abort(
            b'no changesets specified', hint=b'use --rev or --working-dir'
        )
    return revs


def checknodescendants(repo, revs):
    if not obsolete.isenabled(repo, obsolete.allowunstableopt) and repo.revs(
        b'(%ld::) - (%ld)', revs, revs
    ):
        raise error.Abort(
            _(b'can only fix a changeset together with all its descendants')
        )


def checkfixablectx(ui, repo, ctx):
    """Aborts if the revision shouldn't be replaced with a fixed one."""
    if not ctx.mutable():
        raise error.Abort(
            b'can\'t fix immutable changeset %s'
            % (scmutil.formatchangeid(ctx),)
        )
    if ctx.obsolete():
        # It would be better to actually check if the revision has a successor.
        allowdivergence = ui.configbool(
            b'experimental', b'evolution.allowdivergence'
        )
        if not allowdivergence:
            raise error.Abort(
                b'fixing obsolete revision could cause divergence'
            )


def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed.
    """
    files = set()
    for basectx in basectxs:
        stat = basectx.status(
            fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
        )
        files.update(
            set(
                itertools.chain(
                    stat.added, stat.modified, stat.clean, stat.unknown
                )
            )
        )
    return files


def lineranges(opts, path, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that have
    changed versus any of the base contexts, and whether the file has been
    renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get(b'whole'):
        # Return a range containing all lines. Rely on the diff implementation's
        # idea of how many lines are in the file, instead of reimplementing it.
        return difflineranges(b'', content2)

    rangeslist = []
    for basectx in basectxs:
        basepath = copies.pathcopies(basectx, fixctx).get(path, path)
        if basepath in basectx:
            content1 = basectx[basepath].data()
        else:
            content1 = b''
        rangeslist.extend(difflineranges(content1, content2))
    return unionranges(rangeslist)


def unionranges(rangeslist):
    """Return the union of some closed intervals

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    rangeslist = sorted(set(rangeslist))
    unioned = []
    if rangeslist:
        unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
    for a, b in rangeslist:
        c, d = unioned[-1]
        if a > d + 1:
            unioned.append((a, b))
        else:
            unioned[-1] = (c, max(b, d))
    return unioned


def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    ranges = []
    for lines, kind in mdiff.allblocks(content1, content2):
        firstline, lastline = lines[2:4]
        if kind == b'!' and firstline != lastline:
            ranges.append((firstline + 1, lastline))
    return ranges


def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(scmutil.revrange(repo, opts.get(b'base')))
        if not baserevs:
            baserevs = {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        ctx = repo[rev]
        for pctx in ctx.parents():
            if pctx.rev() in basectxs:
                basectxs[rev].update(basectxs[pctx.rev()])
            else:
                basectxs[rev].add(pctx)
    return basectxs


def fixfile(ui, repo, opts, fixers, fixctx, path, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some order
    starting with the file's content in the fixctx. Fixers that support line
    ranges will affect lines that have changed relative to any of the basectxs
    (i.e. they will only avoid lines that are common to all basectxs).

    A fixer tool's stdout will become the file's new content if and only if it
    exits with code zero. The fixer tool's working directory is the repository's
    root.
    """
    metadata = {}
    newdata = fixctx[path].data()
    for fixername, fixer in pycompat.iteritems(fixers):
        if fixer.affects(opts, fixctx, path):
            ranges = lineranges(opts, path, basectxs, fixctx, newdata)
            command = fixer.command(ui, path, ranges)
            if command is None:
                continue
            ui.debug(b'subprocess: %s\n' % (command,))
            proc = subprocess.Popen(
                procutil.tonativestr(command),
                shell=True,
                cwd=procutil.tonativestr(repo.root),
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = proc.communicate(newdata)
            if stderr:
                showstderr(ui, fixctx.rev(), fixername, stderr)
            newerdata = stdout
            if fixer.shouldoutputmetadata():
                try:
                    metadatajson, newerdata = stdout.split(b'\0', 1)
                    metadata[fixername] = pycompat.json_loads(metadatajson)
                except ValueError:
                    ui.warn(
                        _(b'ignored invalid output from fixer tool: %s\n')
                        % (fixername,)
                    )
                    continue
            else:
                metadata[fixername] = None
            if proc.returncode == 0:
                newdata = newerdata
            else:
                if not stderr:
                    message = _(b'exited with status %d\n') % (proc.returncode,)
                    showstderr(ui, fixctx.rev(), fixername, message)
                checktoolfailureaction(
                    ui,
                    _(b'no fixes will be applied'),
                    hint=_(
                        b'use --config fix.failure=continue to apply any '
                        b'successful fixes anyway'
                    ),
                )
    return metadata, newdata


def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each line of
    the error message. Doesn't include file names, since those take up a lot of
    space and would tend to be included in the error message if they were
    relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if line:
            ui.warn(b'[')
            if rev is None:
                ui.warn(_(b'wdir'), label=b'evolve.rev')
            else:
                ui.warn(b'%d' % rev, label=b'evolve.rev')
            ui.warn(b'] %s: %s\n' % (fixername, line))


def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both the
    working directory and its parent revision. This avoids the need for a full
    update/merge, and means that the working directory simply isn't affected
    unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
    for path, data in pycompat.iteritems(filedata):
        fctx = ctx[path]
        fctx.write(data, fctx.flags())
        if repo.dirstate[path] == b'n':
            repo.dirstate.normallookup(path)

    oldparentnodes = repo.dirstate.parents()
    newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
    if newparentnodes != oldparentnodes:
        repo.setparents(*newparentnodes)


710 def replacerev(ui, repo, ctx, filedata, replacements):
709 def replacerev(ui, repo, ctx, filedata, replacements):
711 """Commit a new revision like the given one, but with file content changes
710 """Commit a new revision like the given one, but with file content changes
712
711
713 "ctx" is the original revision to be replaced by a modified one.
712 "ctx" is the original revision to be replaced by a modified one.
714
713
715 "filedata" is a dict that maps paths to their new file content. All other
714 "filedata" is a dict that maps paths to their new file content. All other
716 paths will be recreated from the original revision without changes.
715 paths will be recreated from the original revision without changes.
717 "filedata" may contain paths that didn't exist in the original revision;
716 "filedata" may contain paths that didn't exist in the original revision;
718 they will be added.
717 they will be added.
719
718
720 "replacements" is a dict that maps a single node to a single node, and it is
719 "replacements" is a dict that maps a single node to a single node, and it is
721 updated to indicate the original revision is replaced by the newly created
720 updated to indicate the original revision is replaced by the newly created
722 one. No entry is added if the replacement's node already exists.
721 one. No entry is added if the replacement's node already exists.
723
722
724 The new revision has the same parents as the old one, unless those parents
723 The new revision has the same parents as the old one, unless those parents
725 have already been replaced, in which case those replacements are the parents
724 have already been replaced, in which case those replacements are the parents
726 of this new revision. Thus, if revisions are replaced in topological order,
725 of this new revision. Thus, if revisions are replaced in topological order,
727 there is no need to rebase them into the original topology later.
726 there is no need to rebase them into the original topology later.
728 """
727 """
729
728
730 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
729 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
731 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
730 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
732 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
731 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
733 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
732 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
734
733
735 # We don't want to create a revision that has no changes from the original,
734 # We don't want to create a revision that has no changes from the original,
736 # but we should if the original revision's parent has been replaced.
735 # but we should if the original revision's parent has been replaced.
737 # Otherwise, we would produce an orphan that needs no actual human
736 # Otherwise, we would produce an orphan that needs no actual human
738 # intervention to evolve. We can't rely on commit() to avoid creating the
737 # intervention to evolve. We can't rely on commit() to avoid creating the
739 # un-needed revision because the extra field added below produces a new hash
738 # un-needed revision because the extra field added below produces a new hash
740 # regardless of file content changes.
739 # regardless of file content changes.
741 if (
740 if (
742 not filedata
741 not filedata
743 and p1ctx.node() not in replacements
742 and p1ctx.node() not in replacements
744 and p2ctx.node() not in replacements
743 and p2ctx.node() not in replacements
745 ):
744 ):
746 return
745 return
747
746
    def filectxfn(repo, memctx, path):
        if path not in ctx:
            return None
        fctx = ctx[path]
        copysource = fctx.copysource()
        return context.memfilectx(
            repo,
            memctx,
            path=fctx.path(),
            data=filedata.get(path, fctx.data()),
            islink=fctx.islink(),
            isexec=fctx.isexec(),
            copysource=copysource,
        )

    extra = ctx.extra().copy()
    extra[b'fix_source'] = ctx.hex()
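    # Editorial note: 'fix_source' records the hex node of the revision being
    # rewritten, so the replacement can be traced back to its original. It is
    # also the extra field mentioned above that forces a new hash even when no
    # file content changed.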

    memctx = context.memctx(
        repo,
        parents=(newp1node, newp2node),
        text=ctx.description(),
        files=set(ctx.files()) | set(filedata.keys()),
        filectxfn=filectxfn,
        user=ctx.user(),
        date=ctx.date(),
        extra=extra,
        branch=ctx.branch(),
        editor=None,
    )
    sucnode = memctx.commit()
    prenode = ctx.node()
    if prenode == sucnode:
        ui.debug(b'node %s already existed\n' % (ctx.hex()))
    else:
        replacements[ctx.node()] = sucnode


def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior of the
    fixer's config suboptions. Does not validate the config values.
    """
    fixers = {}
    for name in fixernames(ui):
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to let it
        # affect no files. There is no reasonable subset of files to use as the
        # default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
        elif pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
        elif not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
        else:
            fixers[name] = Fixer(
                command, pattern, linerange, priority, metadata, skipclean
            )
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )
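# Editorial example (hypothetical names, not from the original source): with an
# hgrc such as
#
#   [fix]
#   mytool:command = mytool --stdin
#   mytool:pattern = set:**.py
#   mytool:priority = 10
#   othertool:command = othertool
#   othertool:pattern = set:**.py
#   othertool:priority = 0
#
# getfixers() returns an OrderedDict listing b'mytool' before b'othertool',
# because fixers are sorted by descending :priority.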


def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    names = set()
    for k, v in ui.configitems(b'fix'):
        if b':' in k:
            names.add(k.split(b':', 1)[0])
    return names
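# Editorial note: for the hypothetical config sketched above, fixernames(ui)
# would return {b'mytool', b'othertool'} -- every [fix] key that contains a
# colon contributes the portion before its first b':'.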


class Fixer(object):
    """Wraps the raw config values for a fixer with methods"""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Should this fixer run on the file at the given path and context?"""
        repo = fixctx.repo()
        matcher = matchmod.match(
            repo.root, repo.root, [self._pattern], ctx=fixctx
        )
        return matcher(path)
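    # Editorial note: with a pattern like the hypothetical b'set:**.py' above,
    # affects() builds a matcher rooted at the repository and returns True only
    # for paths that the pattern selects in fixctx.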

    def shouldoutputmetadata(self):
        """Should the stdout of this fixer start with JSON and a null byte?"""
        return self._metadata

    def command(self, ui, path, ranges):
        """A shell command to use to invoke this fixer on the given file/lines

        May return None if there is no appropriate command to run for the given
        parameters.
        """
        expand = cmdutil.rendercommandtemplate
        parts = [
            expand(
                ui,
                self._command,
                {b'rootpath': path, b'basename': os.path.basename(path)},
            )
        ]
        if self._linerange:
            if self._skipclean and not ranges:
                # No line ranges to fix, so don't run the fixer.
                return None
            for first, last in ranges:
                parts.append(
                    expand(
                        ui, self._linerange, {b'first': first, b'last': last}
                    )
                )
        return b' '.join(parts)
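    # Editorial example (hypothetical values): for a fixer configured with
    # command 'mytool --file={basename}' and linerange '--lines={first}:{last}',
    # command(ui, b'src/foo.py', [(1, 5), (10, 12)]) would render roughly
    # b'mytool --file=foo.py --lines=1:5 --lines=10:12'.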