##// END OF EJS Templates
fix: rewrite writeworkingdir() to explicitly not work with merges...
Martin von Zweigbergk -
r48566:3feda1e7 stable
parent child Browse files
Show More
@@ -1,940 +1,943 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:pattern=set:**.cpp or **.hpp
18 clang-format:pattern=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. Any output on standard error
22 fixed file content is expected on standard output. Any output on standard error
23 will be displayed as a warning. If the exit status is not zero, the file will
23 will be displayed as a warning. If the exit status is not zero, the file will
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 status but no standard error output. Some values may be substituted into the
25 status but no standard error output. Some values may be substituted into the
26 command::
26 command::
27
27
28 {rootpath} The path of the file being fixed, relative to the repo root
28 {rootpath} The path of the file being fixed, relative to the repo root
29 {basename} The name of the file being fixed, without the directory path
29 {basename} The name of the file being fixed, without the directory path
30
30
31 If the :linerange suboption is set, the tool will only be run if there are
31 If the :linerange suboption is set, the tool will only be run if there are
32 changed lines in a file. The value of this suboption is appended to the shell
32 changed lines in a file. The value of this suboption is appended to the shell
33 command once for every range of changed lines in the file. Some values may be
33 command once for every range of changed lines in the file. Some values may be
34 substituted into the command::
34 substituted into the command::
35
35
36 {first} The 1-based line number of the first line in the modified range
36 {first} The 1-based line number of the first line in the modified range
37 {last} The 1-based line number of the last line in the modified range
37 {last} The 1-based line number of the last line in the modified range
38
38
39 Deleted sections of a file will be ignored by :linerange, because there is no
39 Deleted sections of a file will be ignored by :linerange, because there is no
40 corresponding line range in the version being fixed.
40 corresponding line range in the version being fixed.
41
41
42 By default, tools that set :linerange will only be executed if there is at least
42 By default, tools that set :linerange will only be executed if there is at least
43 one changed line range. This is meant to prevent accidents like running a code
43 one changed line range. This is meant to prevent accidents like running a code
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
46 to false.
46 to false.
47
47
48 The :pattern suboption determines which files will be passed through each
48 The :pattern suboption determines which files will be passed through each
49 configured tool. See :hg:`help patterns` for possible values. However, all
49 configured tool. See :hg:`help patterns` for possible values. However, all
50 patterns are relative to the repo root, even if that text says they are relative
50 patterns are relative to the repo root, even if that text says they are relative
51 to the current working directory. If there are file arguments to :hg:`fix`, the
51 to the current working directory. If there are file arguments to :hg:`fix`, the
52 intersection of these patterns is used.
52 intersection of these patterns is used.
53
53
54 There is also a configurable limit for the maximum size of file that will be
54 There is also a configurable limit for the maximum size of file that will be
55 processed by :hg:`fix`::
55 processed by :hg:`fix`::
56
56
57 [fix]
57 [fix]
58 maxfilesize = 2MB
58 maxfilesize = 2MB
59
59
60 Normally, execution of configured tools will continue after a failure (indicated
60 Normally, execution of configured tools will continue after a failure (indicated
61 by a non-zero exit status). It can also be configured to abort after the first
61 by a non-zero exit status). It can also be configured to abort after the first
62 such failure, so that no files will be affected if any tool fails. This abort
62 such failure, so that no files will be affected if any tool fails. This abort
63 will also cause :hg:`fix` to exit with a non-zero status::
63 will also cause :hg:`fix` to exit with a non-zero status::
64
64
65 [fix]
65 [fix]
66 failure = abort
66 failure = abort
67
67
68 When multiple tools are configured to affect a file, they execute in an order
68 When multiple tools are configured to affect a file, they execute in an order
69 defined by the :priority suboption. The priority suboption has a default value
69 defined by the :priority suboption. The priority suboption has a default value
70 of zero for each tool. Tools are executed in order of descending priority. The
70 of zero for each tool. Tools are executed in order of descending priority. The
71 execution order of tools with equal priority is unspecified. For example, you
71 execution order of tools with equal priority is unspecified. For example, you
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
73 in a text file by ensuring that 'sort' runs before 'head'::
73 in a text file by ensuring that 'sort' runs before 'head'::
74
74
75 [fix]
75 [fix]
76 sort:command = sort -n
76 sort:command = sort -n
77 head:command = head -n 10
77 head:command = head -n 10
78 sort:pattern = numbers.txt
78 sort:pattern = numbers.txt
79 head:pattern = numbers.txt
79 head:pattern = numbers.txt
80 sort:priority = 2
80 sort:priority = 2
81 head:priority = 1
81 head:priority = 1
82
82
83 To account for changes made by each tool, the line numbers used for incremental
83 To account for changes made by each tool, the line numbers used for incremental
84 formatting are recomputed before executing the next tool. So, each tool may see
84 formatting are recomputed before executing the next tool. So, each tool may see
85 different values for the arguments added by the :linerange suboption.
85 different values for the arguments added by the :linerange suboption.
86
86
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
88 content. The metadata must be placed before the file content on stdout,
88 content. The metadata must be placed before the file content on stdout,
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
91 is expected to produce this metadata encoding if and only if the :metadata
91 is expected to produce this metadata encoding if and only if the :metadata
92 suboption is true::
92 suboption is true::
93
93
94 [fix]
94 [fix]
95 tool:command = tool --prepend-json-metadata
95 tool:command = tool --prepend-json-metadata
96 tool:metadata = true
96 tool:metadata = true
97
97
98 The metadata values are passed to hooks, which can be used to print summaries or
98 The metadata values are passed to hooks, which can be used to print summaries or
99 perform other post-fixing work. The supported hooks are::
99 perform other post-fixing work. The supported hooks are::
100
100
101 "postfixfile"
101 "postfixfile"
102 Run once for each file in each revision where any fixer tools made changes
102 Run once for each file in each revision where any fixer tools made changes
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
105 tools that affected the file. Fixer tools that didn't affect the file have a
105 tools that affected the file. Fixer tools that didn't affect the file have a
106 value of None. Only fixer tools that executed are present in the metadata.
106 value of None. Only fixer tools that executed are present in the metadata.
107
107
108 "postfix"
108 "postfix"
109 Run once after all files and revisions have been handled. Provides
109 Run once after all files and revisions have been handled. Provides
110 "$HG_REPLACEMENTS" with information about what revisions were created and
110 "$HG_REPLACEMENTS" with information about what revisions were created and
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
112 files in the working copy were updated. Provides a list "$HG_METADATA"
112 files in the working copy were updated. Provides a list "$HG_METADATA"
113 mapping fixer tool names to lists of metadata values returned from
113 mapping fixer tool names to lists of metadata values returned from
114 executions that modified a file. This aggregates the same metadata
114 executions that modified a file. This aggregates the same metadata
115 previously passed to the "postfixfile" hook.
115 previously passed to the "postfixfile" hook.
116
116
117 Fixer tools are run in the repository's root directory. This allows them to read
117 Fixer tools are run in the repository's root directory. This allows them to read
118 configuration files from the working copy, or even write to the working copy.
118 configuration files from the working copy, or even write to the working copy.
119 The working copy is not updated to match the revision being fixed. In fact,
119 The working copy is not updated to match the revision being fixed. In fact,
120 several revisions may be fixed in parallel. Writes to the working copy are not
120 several revisions may be fixed in parallel. Writes to the working copy are not
121 amended into the revision being fixed; fixer tools should always write fixed
121 amended into the revision being fixed; fixer tools should always write fixed
122 file content back to stdout as documented above.
122 file content back to stdout as documented above.
123 """
123 """
124
124
125 from __future__ import absolute_import
125 from __future__ import absolute_import
126
126
127 import collections
127 import collections
128 import itertools
128 import itertools
129 import os
129 import os
130 import re
130 import re
131 import subprocess
131 import subprocess
132
132
133 from mercurial.i18n import _
133 from mercurial.i18n import _
134 from mercurial.node import (
134 from mercurial.node import (
135 nullid,
135 nullrev,
136 nullrev,
136 wdirrev,
137 wdirrev,
137 )
138 )
138
139
139 from mercurial.utils import procutil
140 from mercurial.utils import procutil
140
141
141 from mercurial import (
142 from mercurial import (
142 cmdutil,
143 cmdutil,
143 context,
144 context,
144 copies,
145 copies,
145 error,
146 error,
146 match as matchmod,
147 match as matchmod,
147 mdiff,
148 mdiff,
148 merge,
149 merge,
149 mergestate as mergestatemod,
150 mergestate as mergestatemod,
150 pycompat,
151 pycompat,
151 registrar,
152 registrar,
152 rewriteutil,
153 rewriteutil,
153 scmutil,
154 scmutil,
154 util,
155 util,
155 worker,
156 worker,
156 )
157 )
157
158
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Command and config registration tables for this extension.
cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Register the suboptions allowed for each configured fixer, and default values.
FIXER_ATTRS = {
    b'command': None,
    b'linerange': None,
    b'pattern': None,
    b'priority': 0,
    b'metadata': False,
    b'skipclean': True,
    b'enabled': True,
}

# One generic config item per suboption, matching any fixer name ("<name>:<key>").
for key, default in FIXER_ATTRS.items():
    configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)

# A good default size allows most source code files to be fixed, but avoids
# letting fixer tools choke on huge inputs, which could be surprising to the
# user.
configitem(b'fix', b'maxfilesize', default=b'2MB')

# Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
# This helps users do shell scripts that stop when a fixer tool signals a
# problem.
configitem(b'fix', b'failure', default=b'continue')
193
194
194
195
def checktoolfailureaction(ui, message, hint=None):
    """Abort with 'message' if fix.failure=abort"""
    failure_mode = ui.config(b'fix', b'failure')
    valid_modes = (b'continue', b'abort')
    # An unrecognized value is a configuration error in its own right.
    if failure_mode not in valid_modes:
        raise error.Abort(
            _(b'unknown fix.failure action: %s') % (failure_mode,),
            hint=_(b'use "continue" or "abort"'),
        )
    if failure_mode == b'abort':
        raise error.Abort(message, hint=hint)
205
206
206
207
# Command-line option tuples shared by the `fix` command definition below,
# in the (shortname, longname, default, help[, valuelabel]) form expected by
# the command registrar.
allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
baseopt = (
    b'',
    b'base',
    [],
    _(
        b'revisions to diff against (overrides automatic '
        b'selection, and applies to every revision being '
        b'fixed)'
    ),
    _(b'REV'),
)
revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
sourceopt = (
    b's',
    b'source',
    [],
    _(b'fix the specified revisions and their descendants'),
    _(b'REV'),
)
wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
usage = _(b'[OPTION]... [FILE]...')
230
231
231
232
@command(
    b'fix',
    [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
    usage,
    helpcategory=command.CATEGORY_FILE_CONTENTS,
)
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. Note that no backup are made.

    If revisions are specified with --source, those revisions and their
    descendants will be checked, and they may be replaced with new revisions
    that have fixed file content. By automatically including the descendants,
    no merging, rebasing, or evolution will be required. If an ancestor of the
    working copy is included, then the working copy itself will also be fixed,
    and the working copy will be updated to the fixed parent.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
    # --all, --source and --rev are mutually exclusive ways of selecting
    # revisions; --working-dir is implied by --all/--source and may not be
    # combined with them explicitly.
    cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
    cmdutil.check_incompatible_arguments(
        opts, b'working_dir', [b'all', b'source']
    )

    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(
            ui, repo, pats, opts, revstofix, basectxs
        )
        basepaths = getbasepaths(repo, opts, workqueue, basectxs)
        fixers = getfixers(ui)

        # Rather than letting each worker independently fetch the files
        # (which also would add complications for shared/keepalive
        # connections), prefetch them all first.
        _prefetchfiles(repo, workqueue, basepaths)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                metadata, newdata = fixfile(
                    ui, repo, opts, fixers, ctx, path, basepaths, basectxs[rev]
                )
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (
                    rev,
                    path,
                    metadata,
                    newdata if newdata != olddata else None,
                )

        results = worker.worker(
            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
        )

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        aggregatemetadata = collections.defaultdict(list)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(
            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
        ) as progress:
            for rev, path, filerevmetadata, newdata in results:
                progress.increment(item=path)
                for fixername, fixermetadata in filerevmetadata.items():
                    aggregatemetadata[fixername].append(fixermetadata)
                if newdata is not None:
                    filedata[rev][path] = newdata
                    hookargs = {
                        b'rev': rev,
                        b'path': path,
                        b'metadata': filerevmetadata,
                    }
                    repo.hook(
                        b'postfixfile',
                        throw=False,
                        **pycompat.strkwargs(hookargs)
                    )
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
        hookargs = {
            b'replacements': replacements,
            b'wdirwritten': wdirwritten,
            b'metadata': aggregatemetadata,
        }
        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
355
356
356
357
def cleanup(repo, replacements, wdirwritten):
    """Calls scmutil.cleanupnodes() with the given replacements.

    "replacements" is a dict from nodeid to nodeid, with one key and one value
    for every revision that was affected by fixing. This is slightly different
    from cleanupnodes().

    "wdirwritten" is a bool which tells whether the working copy was affected by
    fixing, since it has no entry in "replacements".

    Useful as a hook point for extending "hg fix" with output summarizing the
    effects of the command, though we choose not to output anything here.
    """
    # cleanupnodes() wants each precursor mapped to a *list* of successors,
    # so wrap every single successor in a one-element list.
    successor_map = {}
    for prec, succ in pycompat.iteritems(replacements):
        successor_map[prec] = [succ]
    scmutil.cleanupnodes(repo, successor_map, b'fix', fixphase=True)
374
375
375
376
def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
    """Constructs the list of files to be fixed at specific revisions

    It is up to the caller how to consume the work items, and the only
    dependence between them is that replacement revisions must be committed in
    topological order. Each work item represents a file in the working copy or
    in some revision that should be fixed and written back to the working copy
    or into a replacement revision.

    Work items for the same revision are grouped together, so that a worker
    pool starting with the first N items in parallel is likely to finish the
    first revision's work before other revisions. This can allow us to write
    the result to disk and reduce memory footprint. At time of writing, the
    partition strategy in worker.py seems favorable to this. We also sort the
    items by ascending revision number to match the order in which we commit
    the fixes later.
    """
    workqueue = []
    # Count of pending work items per revision, so the caller knows when a
    # revision's results are complete.
    numitems = collections.defaultdict(int)
    maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
    for rev in sorted(revstofix):
        fixctx = repo[rev]
        match = scmutil.match(fixctx, pats, opts)
        for path in sorted(
            pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
        ):
            fctx = fixctx[path]
            # Symlinks are skipped: their "content" is a target path, not
            # fixable text.
            if fctx.islink():
                continue
            # Honor fix.maxfilesize to avoid feeding huge inputs to tools.
            if fctx.size() > maxfilesize:
                ui.warn(
                    _(b'ignoring file larger than %s: %s\n')
                    % (util.bytecount(maxfilesize), path)
                )
                continue
            workqueue.append((rev, path))
            numitems[rev] += 1
    return workqueue, numitems
414
415
415
416
def getrevstofix(ui, repo, opts):
    """Returns the set of revision numbers that should be fixed

    Honors --all, --source and --rev/--working-dir. Every selected revision
    is validated with checkfixablectx(); empty selections and unresolved
    merge conflicts in the working copy abort the command.
    """
    if opts[b'all']:
        revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
    elif opts[b'source']:
        source_revs = scmutil.revrange(repo, opts[b'source'])
        revs = set(repo.revs(b'(%ld::) - obsolete()', source_revs))
        if wdirrev in source_revs:
            # `wdir()::` is currently empty, so manually add wdir
            revs.add(wdirrev)
        if repo[b'.'].rev() in revs:
            revs.add(wdirrev)
    else:
        revs = set(scmutil.revrange(repo, opts[b'rev']))
        if opts.get(b'working_dir'):
            revs.add(wdirrev)
    for rev in revs:
        checkfixablectx(ui, repo, repo[rev])
    # Allow fixing only wdir() even if there's an unfinished operation
    if not (len(revs) == 1 and wdirrev in revs):
        cmdutil.checkunfinished(repo)
        rewriteutil.precheck(repo, revs, b'fix')
    if (
        wdirrev in revs
        and mergestatemod.mergestate.read(repo).unresolvedcount()
    ):
        raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
    if not revs:
        raise error.Abort(
            b'no changesets specified', hint=b'use --source or --working-dir'
        )
    return revs
448
449
449
450
def checkfixablectx(ui, repo, ctx):
    """Aborts if the revision shouldn't be replaced with a fixed one."""
    if not ctx.obsolete():
        # Non-obsolete revisions are always fine to rewrite.
        return
    # It would be better to actually check if the revision has a successor.
    allowdivergence = ui.configbool(
        b'experimental', b'evolution.allowdivergence'
    )
    if not allowdivergence:
        raise error.Abort(
            b'fixing obsolete revision could cause divergence'
        )
461
462
462
463
def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed. When explicit patterns were
    supplied, clean and unknown files are also considered.
    """
    paths = set()
    explicit = bool(pats)
    for basectx in basectxs:
        stat = basectx.status(
            fixctx, match=match, listclean=explicit, listunknown=explicit
        )
        # set.update accepts several iterables at once, so no need to
        # chain them into an intermediate set first.
        paths.update(stat.added, stat.modified, stat.clean, stat.unknown)
    return paths
483
484
484
485
def lineranges(opts, path, basepaths, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that
    have changed versus any of the base contexts, and whether the file has
    been renamed versus any of them (renames are looked up in
    ``basepaths``).

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get(b'whole'):
        # Return a range containing all lines. Rely on the diff
        # implementation's idea of how many lines are in the file, instead
        # of reimplementing it.
        return difflineranges(b'', content2)

    allranges = []
    for basectx in basectxs:
        basepath = basepaths.get((basectx.rev(), fixctx.rev(), path), path)
        content1 = basectx[basepath].data() if basepath in basectx else b''
        allranges.extend(difflineranges(content1, content2))
    return unionranges(allranges)
512
513
513
514
def getbasepaths(repo, opts, workqueue, basectxs):
    """Returns a map from (baserev, fixrev, path) to the base's file path

    Follows renames/copies between each base context and the context being
    fixed so that line ranges can later be computed against the right file.
    """
    if opts.get(b'whole'):
        # Base paths will never be fetched for line range determination.
        return {}

    basepaths = {}
    for rev, path in workqueue:
        fixctx = repo[rev]
        for basectx in basectxs[rev]:
            basepath = copies.pathcopies(basectx, fixctx).get(path, path)
            if basepath in basectx:
                basepaths[(basectx.rev(), fixctx.rev(), path)] = basepath
    return basepaths
527
528
528
529
def unionranges(rangeslist):
    """Return the union of some closed intervals

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    merged = []
    # Deduplicate and sort so each interval only needs to be compared with
    # the most recently merged one.
    for start, end in sorted(set(rangeslist)):
        if merged and start <= merged[-1][1] + 1:
            # Overlapping or adjacent: extend the previous interval.
            prevstart, prevend = merged[-1]
            merged[-1] = (prevstart, max(prevend, end))
        else:
            merged.append((start, end))
    return merged
562
563
563
564
def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line
    contained in the range. Single-line ranges have the same line number for
    the first and last line. Excludes any empty ranges that result from
    lines that are only present in content1. Relies on mdiff's idea of
    where the line endings are in the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    # Each block is ((a1, a2, b1, b2), kind); b1:b2 delimit the block in
    # content2 (0-based, half-open). Only changed ('!') non-empty blocks
    # contribute, converted to 1-based closed intervals.
    return [
        (block[2] + 1, block[3])
        for block, kind in mdiff.allblocks(content1, content2)
        if kind == b'!' and block[2] != block[3]
    ]
609
610
610
611
def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines
    which files we attempt to fix, so it is important to compute this even
    when --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(scmutil.revrange(repo, opts.get(b'base')))
        if not baserevs:
            baserevs = {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        ctx = repo[rev]
        for pctx in ctx.parents():
            if pctx.rev() in basectxs:
                # Parent is also being fixed: inherit its bases.
                basectxs[rev].update(basectxs[pctx.rev()])
            else:
                basectxs[rev].add(pctx)
    return basectxs
639
640
640
641
641 def _prefetchfiles(repo, workqueue, basepaths):
642 def _prefetchfiles(repo, workqueue, basepaths):
642 toprefetch = set()
643 toprefetch = set()
643
644
644 # Prefetch the files that will be fixed.
645 # Prefetch the files that will be fixed.
645 for rev, path in workqueue:
646 for rev, path in workqueue:
646 if rev == wdirrev:
647 if rev == wdirrev:
647 continue
648 continue
648 toprefetch.add((rev, path))
649 toprefetch.add((rev, path))
649
650
650 # Prefetch the base contents for lineranges().
651 # Prefetch the base contents for lineranges().
651 for (baserev, fixrev, path), basepath in basepaths.items():
652 for (baserev, fixrev, path), basepath in basepaths.items():
652 toprefetch.add((baserev, basepath))
653 toprefetch.add((baserev, basepath))
653
654
654 if toprefetch:
655 if toprefetch:
655 scmutil.prefetchfiles(
656 scmutil.prefetchfiles(
656 repo,
657 repo,
657 [
658 [
658 (rev, scmutil.matchfiles(repo, [path]))
659 (rev, scmutil.matchfiles(repo, [path]))
659 for rev, path in toprefetch
660 for rev, path in toprefetch
660 ],
661 ],
661 )
662 )
662
663
663
664
def fixfile(ui, repo, opts, fixers, fixctx, path, basepaths, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some
    order starting with the file's content in the fixctx. Fixers that
    support line ranges will affect lines that have changed relative to any
    of the basectxs (i.e. they will only avoid lines that are common to all
    basectxs).

    A fixer tool's stdout will become the file's new content if and only if
    it exits with code zero. The fixer tool's working directory is the
    repository's root.
    """
    metadata = {}
    newdata = fixctx[path].data()
    for fixername, fixer in pycompat.iteritems(fixers):
        if not fixer.affects(opts, fixctx, path):
            continue
        ranges = lineranges(opts, path, basepaths, basectxs, fixctx, newdata)
        command = fixer.command(ui, path, ranges)
        if command is None:
            # The fixer decided there is nothing for it to do.
            continue
        ui.debug(b'subprocess: %s\n' % (command,))
        proc = subprocess.Popen(
            procutil.tonativestr(command),
            shell=True,
            cwd=procutil.tonativestr(repo.root),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # Feed the current (possibly already-fixed) content on stdin.
        stdout, stderr = proc.communicate(newdata)
        if stderr:
            showstderr(ui, fixctx.rev(), fixername, stderr)
        newerdata = stdout
        if fixer.shouldoutputmetadata():
            # Metadata-producing tools prefix stdout with a JSON document
            # terminated by a NUL byte.
            try:
                metadatajson, newerdata = stdout.split(b'\0', 1)
                metadata[fixername] = pycompat.json_loads(metadatajson)
            except ValueError:
                ui.warn(
                    _(b'ignored invalid output from fixer tool: %s\n')
                    % (fixername,)
                )
                continue
        else:
            metadata[fixername] = None
        if proc.returncode == 0:
            newdata = newerdata
        else:
            if not stderr:
                message = _(b'exited with status %d\n') % (proc.returncode,)
                showstderr(ui, fixctx.rev(), fixername, message)
            checktoolfailureaction(
                ui,
                _(b'no fixes will be applied'),
                hint=_(
                    b'use --config fix.failure=continue to apply any '
                    b'successful fixes anyway'
                ),
            )
    return metadata, newdata
726
727
727
728
def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each
    line of the error message. Doesn't include file names, since those take
    up a lot of space and would tend to be included in the error message if
    they were relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if not line:
            # Skip the empty fragments produced by trailing newlines.
            continue
        ui.warn(b'[')
        if rev is None:
            ui.warn(_(b'wdir'), label=b'evolve.rev')
        else:
            ui.warn(b'%d' % rev, label=b'evolve.rev')
        ui.warn(b'] %s: %s\n' % (fixername, line))
744
745
745
746
def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both
    the working directory and its parent revision. This avoids the need for
    a full update/merge, and means that the working directory simply isn't
    affected unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
    # This function explicitly refuses to work with an in-progress merge:
    # only p1 is ever rewritten below.
    assert repo.dirstate.p2() == nullid

    for path, data in pycompat.iteritems(filedata):
        fctx = ctx[path]
        fctx.write(data, fctx.flags())
        if repo.dirstate[path] == b'n':
            repo.dirstate.set_possibly_dirty(path)

    oldp1 = repo.dirstate.p1()
    newp1 = replacements.get(oldp1, oldp1)
    if newp1 != oldp1:
        repo.setparents(newp1, nullid)
766
769
767
770
def replacerev(ui, repo, ctx, filedata, replacements):
    """Commit a new revision like the given one, but with file content changes

    "ctx" is the original revision to be replaced by a modified one.

    "filedata" is a dict that maps paths to their new file content. All
    other paths will be recreated from the original revision without
    changes. "filedata" may contain paths that didn't exist in the original
    revision; they will be added.

    "replacements" is a dict that maps a single node to a single node, and
    it is updated to indicate the original revision is replaced by the
    newly created one. No entry is added if the replacement's node already
    exists.

    The new revision has the same parents as the old one, unless those
    parents have already been replaced, in which case those replacements
    are the parents of this new revision. Thus, if revisions are replaced
    in topological order, there is no need to rebase them into the original
    topology later.
    """

    p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
    p1ctx, p2ctx = repo[p1rev], repo[p2rev]
    newp1node = replacements.get(p1ctx.node(), p1ctx.node())
    newp2node = replacements.get(p2ctx.node(), p2ctx.node())

    # We don't want to create a revision that has no changes from the
    # original, but we should if the original revision's parent has been
    # replaced. Otherwise, we would produce an orphan that needs no actual
    # human intervention to evolve. We can't rely on commit() to avoid
    # creating the un-needed revision because the extra field added below
    # produces a new hash regardless of file content changes.
    if (
        not filedata
        and p1ctx.node() not in replacements
        and p2ctx.node() not in replacements
    ):
        return

    extra = ctx.extra().copy()
    extra[b'fix_source'] = ctx.hex()

    # Build the replacement in an overlay working context based on the
    # (possibly replaced) first parent, carrying over copy records.
    wctx = context.overlayworkingctx(repo)
    wctx.setbase(repo[newp1node])
    merge.revert_to(ctx, wc=wctx)
    copies.graftcopies(wctx, ctx, ctx.p1())

    for path in filedata.keys():
        fctx = ctx[path]
        copysource = fctx.copysource()
        wctx.write(path, filedata[path], flags=fctx.flags())
        if copysource:
            wctx.markcopied(path, copysource)

    # Rewrite hash references in the description to point at replacements.
    desc = rewriteutil.update_hash_refs(
        repo,
        ctx.description(),
        {oldnode: [newnode] for oldnode, newnode in replacements.items()},
    )

    memctx = wctx.tomemctx(
        text=desc,
        branch=ctx.branch(),
        extra=extra,
        date=ctx.date(),
        parents=(newp1node, newp2node),
        user=ctx.user(),
    )

    sucnode = memctx.commit()
    prenode = ctx.node()
    if prenode == sucnode:
        ui.debug(b'node %s already existed\n' % (ctx.hex()))
    else:
        replacements[ctx.node()] = sucnode
842
845
843
846
def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior
    of the fixer's config suboptions. Does not validate the config values.
    The result is ordered by descending priority.
    """
    fixers = {}
    for name in fixernames(ui):
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to
        # let it affect no files. There is no reasonable subset of files to
        # use as the default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
        elif pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
        elif not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
        else:
            fixers[name] = Fixer(
                command, pattern, linerange, priority, metadata, skipclean
            )
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )
880
883
881
884
def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    # Fixer tools are declared via config keys of the form
    # "<name>:<suboption>"; collect the distinct <name> prefixes.
    return {
        key.split(b':', 1)[0]
        for key, _value in ui.configitems(b'fix')
        if b':' in key
    }
889
892
890
893
class Fixer(object):
    """A single fixer tool's configuration, with helpers for applying it."""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        # Store the raw [fix] suboption values; they are interpreted by the
        # accessor methods below.
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Return whether this fixer's pattern matches ``path`` in ``fixctx``."""
        repo = fixctx.repo()
        m = matchmod.match(
            repo.root, repo.root, [self._pattern], ctx=fixctx
        )
        return m(path)

    def shouldoutputmetadata(self):
        """Return whether this tool's stdout starts with JSON and a null byte."""
        return self._metadata

    def command(self, ui, path, ranges):
        """Render the shell command that runs this fixer on one file.

        Returns None when there is no appropriate command for the given
        parameters (line ranges are configured, skipclean is set, and there
        are no dirty ranges to fix).
        """
        render = cmdutil.rendercommandtemplate
        pieces = [
            render(
                ui,
                self._command,
                {b'rootpath': path, b'basename': os.path.basename(path)},
            )
        ]
        if self._linerange:
            if self._skipclean and not ranges:
                # Nothing in the file changed, so there is nothing to fix.
                return None
            pieces.extend(
                render(ui, self._linerange, {b'first': lo, b'last': hi})
                for lo, hi in ranges
            )
        return b' '.join(pieces)
General Comments 0
You need to be logged in to leave comments. Login now