##// END OF EJS Templates
fix: prefetch file contents...
Rodrigo Damazio Bovendorp -
r45615:263cf0f6 default draft
parent child Browse files
Show More
@@ -1,903 +1,926 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:pattern=set:**.cpp or **.hpp
18 clang-format:pattern=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. Any output on standard error
22 fixed file content is expected on standard output. Any output on standard error
23 will be displayed as a warning. If the exit status is not zero, the file will
23 will be displayed as a warning. If the exit status is not zero, the file will
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 status but no standard error output. Some values may be substituted into the
25 status but no standard error output. Some values may be substituted into the
26 command::
26 command::
27
27
28 {rootpath} The path of the file being fixed, relative to the repo root
28 {rootpath} The path of the file being fixed, relative to the repo root
29 {basename} The name of the file being fixed, without the directory path
29 {basename} The name of the file being fixed, without the directory path
30
30
31 If the :linerange suboption is set, the tool will only be run if there are
31 If the :linerange suboption is set, the tool will only be run if there are
32 changed lines in a file. The value of this suboption is appended to the shell
32 changed lines in a file. The value of this suboption is appended to the shell
33 command once for every range of changed lines in the file. Some values may be
33 command once for every range of changed lines in the file. Some values may be
34 substituted into the command::
34 substituted into the command::
35
35
36 {first} The 1-based line number of the first line in the modified range
36 {first} The 1-based line number of the first line in the modified range
37 {last} The 1-based line number of the last line in the modified range
37 {last} The 1-based line number of the last line in the modified range
38
38
39 Deleted sections of a file will be ignored by :linerange, because there is no
39 Deleted sections of a file will be ignored by :linerange, because there is no
40 corresponding line range in the version being fixed.
40 corresponding line range in the version being fixed.
41
41
42 By default, tools that set :linerange will only be executed if there is at least
42 By default, tools that set :linerange will only be executed if there is at least
43 one changed line range. This is meant to prevent accidents like running a code
43 one changed line range. This is meant to prevent accidents like running a code
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
46 to false.
46 to false.
47
47
48 The :pattern suboption determines which files will be passed through each
48 The :pattern suboption determines which files will be passed through each
49 configured tool. See :hg:`help patterns` for possible values. However, all
49 configured tool. See :hg:`help patterns` for possible values. However, all
50 patterns are relative to the repo root, even if that text says they are relative
50 patterns are relative to the repo root, even if that text says they are relative
51 to the current working directory. If there are file arguments to :hg:`fix`, the
51 to the current working directory. If there are file arguments to :hg:`fix`, the
52 intersection of these patterns is used.
52 intersection of these patterns is used.
53
53
54 There is also a configurable limit for the maximum size of file that will be
54 There is also a configurable limit for the maximum size of file that will be
55 processed by :hg:`fix`::
55 processed by :hg:`fix`::
56
56
57 [fix]
57 [fix]
58 maxfilesize = 2MB
58 maxfilesize = 2MB
59
59
60 Normally, execution of configured tools will continue after a failure (indicated
60 Normally, execution of configured tools will continue after a failure (indicated
61 by a non-zero exit status). It can also be configured to abort after the first
61 by a non-zero exit status). It can also be configured to abort after the first
62 such failure, so that no files will be affected if any tool fails. This abort
62 such failure, so that no files will be affected if any tool fails. This abort
63 will also cause :hg:`fix` to exit with a non-zero status::
63 will also cause :hg:`fix` to exit with a non-zero status::
64
64
65 [fix]
65 [fix]
66 failure = abort
66 failure = abort
67
67
68 When multiple tools are configured to affect a file, they execute in an order
68 When multiple tools are configured to affect a file, they execute in an order
69 defined by the :priority suboption. The priority suboption has a default value
69 defined by the :priority suboption. The priority suboption has a default value
70 of zero for each tool. Tools are executed in order of descending priority. The
70 of zero for each tool. Tools are executed in order of descending priority. The
71 execution order of tools with equal priority is unspecified. For example, you
71 execution order of tools with equal priority is unspecified. For example, you
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
73 in a text file by ensuring that 'sort' runs before 'head'::
73 in a text file by ensuring that 'sort' runs before 'head'::
74
74
75 [fix]
75 [fix]
76 sort:command = sort -n
76 sort:command = sort -n
77 head:command = head -n 10
77 head:command = head -n 10
78 sort:pattern = numbers.txt
78 sort:pattern = numbers.txt
79 head:pattern = numbers.txt
79 head:pattern = numbers.txt
80 sort:priority = 2
80 sort:priority = 2
81 head:priority = 1
81 head:priority = 1
82
82
83 To account for changes made by each tool, the line numbers used for incremental
83 To account for changes made by each tool, the line numbers used for incremental
84 formatting are recomputed before executing the next tool. So, each tool may see
84 formatting are recomputed before executing the next tool. So, each tool may see
85 different values for the arguments added by the :linerange suboption.
85 different values for the arguments added by the :linerange suboption.
86
86
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
88 content. The metadata must be placed before the file content on stdout,
88 content. The metadata must be placed before the file content on stdout,
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
91 is expected to produce this metadata encoding if and only if the :metadata
91 is expected to produce this metadata encoding if and only if the :metadata
92 suboption is true::
92 suboption is true::
93
93
94 [fix]
94 [fix]
95 tool:command = tool --prepend-json-metadata
95 tool:command = tool --prepend-json-metadata
96 tool:metadata = true
96 tool:metadata = true
97
97
98 The metadata values are passed to hooks, which can be used to print summaries or
98 The metadata values are passed to hooks, which can be used to print summaries or
99 perform other post-fixing work. The supported hooks are::
99 perform other post-fixing work. The supported hooks are::
100
100
101 "postfixfile"
101 "postfixfile"
102 Run once for each file in each revision where any fixer tools made changes
102 Run once for each file in each revision where any fixer tools made changes
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
105 tools that affected the file. Fixer tools that didn't affect the file have a
105 tools that affected the file. Fixer tools that didn't affect the file have a
106 value of None. Only fixer tools that executed are present in the metadata.
106 value of None. Only fixer tools that executed are present in the metadata.
107
107
108 "postfix"
108 "postfix"
109 Run once after all files and revisions have been handled. Provides
109 Run once after all files and revisions have been handled. Provides
110 "$HG_REPLACEMENTS" with information about what revisions were created and
110 "$HG_REPLACEMENTS" with information about what revisions were created and
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
112 files in the working copy were updated. Provides a list "$HG_METADATA"
112 files in the working copy were updated. Provides a list "$HG_METADATA"
113 mapping fixer tool names to lists of metadata values returned from
113 mapping fixer tool names to lists of metadata values returned from
114 executions that modified a file. This aggregates the same metadata
114 executions that modified a file. This aggregates the same metadata
115 previously passed to the "postfixfile" hook.
115 previously passed to the "postfixfile" hook.
116
116
117 Fixer tools are run in the repository's root directory. This allows them to read
117 Fixer tools are run in the repository's root directory. This allows them to read
118 configuration files from the working copy, or even write to the working copy.
118 configuration files from the working copy, or even write to the working copy.
119 The working copy is not updated to match the revision being fixed. In fact,
119 The working copy is not updated to match the revision being fixed. In fact,
120 several revisions may be fixed in parallel. Writes to the working copy are not
120 several revisions may be fixed in parallel. Writes to the working copy are not
121 amended into the revision being fixed; fixer tools should always write fixed
121 amended into the revision being fixed; fixer tools should always write fixed
122 file content back to stdout as documented above.
122 file content back to stdout as documented above.
123 """
123 """
124
124
125 from __future__ import absolute_import
125 from __future__ import absolute_import
126
126
127 import collections
127 import collections
128 import itertools
128 import itertools
129 import os
129 import os
130 import re
130 import re
131 import subprocess
131 import subprocess
132
132
133 from mercurial.i18n import _
133 from mercurial.i18n import _
134 from mercurial.node import nullrev
134 from mercurial.node import nullrev
135 from mercurial.node import wdirrev
135 from mercurial.node import wdirrev
136
136
137 from mercurial.utils import procutil
137 from mercurial.utils import procutil
138
138
139 from mercurial import (
139 from mercurial import (
140 cmdutil,
140 cmdutil,
141 context,
141 context,
142 copies,
142 copies,
143 error,
143 error,
144 match as matchmod,
144 match as matchmod,
145 mdiff,
145 mdiff,
146 merge,
146 merge,
147 mergestate as mergestatemod,
147 mergestate as mergestatemod,
148 pycompat,
148 pycompat,
149 registrar,
149 registrar,
150 rewriteutil,
150 rewriteutil,
151 scmutil,
151 scmutil,
152 util,
152 util,
153 worker,
153 worker,
154 )
154 )
155
155
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Command table populated by the @command decorator below.
cmdtable = {}
command = registrar.command(cmdtable)

# Config table populated by the configitem() calls below.
configtable = {}
configitem = registrar.configitem(configtable)

# Register the suboptions allowed for each configured fixer, and default values.
FIXER_ATTRS = {
    b'command': None,
    b'linerange': None,
    b'pattern': None,
    b'priority': 0,
    b'metadata': False,
    b'skipclean': True,
    b'enabled': True,
}

# One generic config item per suboption, matching e.g. "fix.mytool:command".
for key, default in FIXER_ATTRS.items():
    configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)

# A good default size allows most source code files to be fixed, but avoids
# letting fixer tools choke on huge inputs, which could be surprising to the
# user.
configitem(b'fix', b'maxfilesize', default=b'2MB')

# Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
# This helps users do shell scripts that stop when a fixer tool signals a
# problem.
configitem(b'fix', b'failure', default=b'continue')
191
191
192
192
def checktoolfailureaction(ui, message, hint=None):
    """Abort with 'message' if fix.failure=abort

    Also validates that the fix.failure config value is one of the two
    recognized actions, aborting with a usage hint otherwise.
    """
    action = ui.config(b'fix', b'failure')
    # The configured abort action takes effect only after we know the value
    # is recognized at all; an unknown value is its own, earlier error.
    if action not in (b'continue', b'abort'):
        unknown = _(b'unknown fix.failure action: %s') % (action,)
        raise error.Abort(unknown, hint=_(b'use "continue" or "abort"'))
    if action == b'abort':
        raise error.Abort(message, hint=hint)
203
203
204
204
# Command-line option tuples for :hg:`fix`, in the (shortflag, longflag,
# default, help[, valuelabel]) shape expected by the @command decorator.
allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
baseopt = (
    b'',
    b'base',
    [],
    _(
        b'revisions to diff against (overrides automatic '
        b'selection, and applies to every revision being '
        b'fixed)'
    ),
    _(b'REV'),
)
revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
sourceopt = (
    b's',
    b'source',
    [],
    _(b'fix the specified revisions and their descendants'),
    _(b'REV'),
)
wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
usage = _(b'[OPTION]... [FILE]...')
228
228
229
229
@command(
    b'fix',
    [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
    usage,
    helpcategory=command.CATEGORY_FILE_CONTENTS,
)
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If revisions are specified with --rev, those revisions will be checked, and
    they may be replaced with new revisions that have fixed file content. It is
    desirable to specify all descendants of each specified revision, so that the
    fixes propagate to the descendants. If all descendants are fixed at the same
    time, no merging, rebasing, or evolution will be required.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. If the checked-out revision is also fixed, the working
    directory will update to the replacement revision.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
    # --all, --source and --rev are mutually exclusive ways of selecting
    # revisions; --working-dir is implied by --all/--source, so it may not be
    # combined with them.
    cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
    cmdutil.check_incompatible_arguments(
        opts, b'working_dir', [b'all', b'source']
    )

    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(
            ui, repo, pats, opts, revstofix, basectxs
        )
        basepaths = getbasepaths(repo, opts, workqueue, basectxs)
        fixers = getfixers(ui)

        # Rather than letting each worker independently fetch the files
        # (which also would add complications for shared/keepalive
        # connections), prefetch them all first.
        _prefetchfiles(repo, workqueue, basepaths)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                metadata, newdata = fixfile(
                    ui, repo, opts, fixers, ctx, path, basepaths, basectxs[rev]
                )
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (
                    rev,
                    path,
                    metadata,
                    newdata if newdata != olddata else None,
                )

        results = worker.worker(
            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
        )

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        aggregatemetadata = collections.defaultdict(list)
        replacements = {}
        wdirwritten = False
        # Descending order so that commitorder.pop() yields ascending revs.
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(
            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
        ) as progress:
            for rev, path, filerevmetadata, newdata in results:
                progress.increment(item=path)
                for fixername, fixermetadata in filerevmetadata.items():
                    aggregatemetadata[fixername].append(fixermetadata)
                if newdata is not None:
                    filedata[rev][path] = newdata
                    # The postfixfile hook fires only for files whose content
                    # actually changed; throw=False keeps hook failures from
                    # aborting the whole command mid-commit.
                    hookargs = {
                        b'rev': rev,
                        b'path': path,
                        b'metadata': filerevmetadata,
                    }
                    repo.hook(
                        b'postfixfile',
                        throw=False,
                        **pycompat.strkwargs(hookargs)
                    )
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    # Free this revision's fixed content now that it is
                    # committed (see memory note above).
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
        hookargs = {
            b'replacements': replacements,
            b'wdirwritten': wdirwritten,
            b'metadata': aggregatemetadata,
        }
        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
353
349
354
def cleanup(repo, replacements, wdirwritten):
    """Calls scmutil.cleanupnodes() with the given replacements.

    "replacements" is a dict from nodeid to nodeid, with one key and one value
    for every revision that was affected by fixing. This is slightly different
    from cleanupnodes().

    "wdirwritten" is a bool which tells whether the working copy was affected by
    fixing, since it has no entry in "replacements".

    Useful as a hook point for extending "hg fix" with output summarizing the
    effects of the command, though we choose not to output anything here.
    """
    # cleanupnodes() expects each precursor to map to a *list* of successors;
    # fixing always produces exactly one successor per rewritten revision.
    successors = {}
    for prec, succ in pycompat.iteritems(replacements):
        successors[prec] = [succ]
    scmutil.cleanupnodes(repo, successors, b'fix', fixphase=True)
367
372
368
373
def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
    """Constructs the list of files to be fixed at specific revisions

    It is up to the caller how to consume the work items, and the only
    dependence between them is that replacement revisions must be committed in
    topological order. Each work item represents a file in the working copy or
    in some revision that should be fixed and written back to the working copy
    or into a replacement revision.

    Work items for the same revision are grouped together, so that a worker
    pool starting with the first N items in parallel is likely to finish the
    first revision's work before other revisions. This can allow us to write
    the result to disk and reduce memory footprint. At time of writing, the
    partition strategy in worker.py seems favorable to this. We also sort the
    items by ascending revision number to match the order in which we commit
    the fixes later.

    Returns a pair (workqueue, numitems) where "workqueue" is a list of
    (rev, path) tuples and "numitems" maps each revision to its item count.
    """
    workqueue = []
    numitems = collections.defaultdict(int)
    maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
    for rev in sorted(revstofix):
        fixctx = repo[rev]
        match = scmutil.match(fixctx, pats, opts)
        for path in sorted(
            pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
        ):
            fctx = fixctx[path]
            # Symlinks hold a target path, not fixable file content.
            if fctx.islink():
                continue
            if fctx.size() > maxfilesize:
                ui.warn(
                    _(b'ignoring file larger than %s: %s\n')
                    % (util.bytecount(maxfilesize), path)
                )
                continue
            workqueue.append((rev, path))
            numitems[rev] += 1
    return workqueue, numitems
407
412
408
413
def getrevstofix(ui, repo, opts):
    """Return the set of revision numbers that should be fixed.

    The selection comes from --all, --source, or --rev/--working-dir, in
    that order of precedence. Aborts when the selection is empty, contains
    an unfixable revision, or conflicts with repository state (unfinished
    operation, unresolved merge).
    """
    if opts[b'all']:
        # Everything mutable, plus the working copy.
        revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
    elif opts[b'source']:
        srcrevs = scmutil.revrange(repo, opts[b'source'])
        revs = set(repo.revs(b'%ld::', srcrevs))
        if wdirrev in srcrevs:
            # `wdir()::` is currently empty, so manually add wdir
            revs.add(wdirrev)
        if repo[b'.'].rev() in revs:
            revs.add(wdirrev)
    else:
        revs = set(scmutil.revrange(repo, opts[b'rev']))
        if opts.get(b'working_dir'):
            revs.add(wdirrev)
    for rev in revs:
        checkfixablectx(ui, repo, repo[rev])
    # Allow fixing only wdir() even if there's an unfinished operation
    if not (len(revs) == 1 and wdirrev in revs):
        cmdutil.checkunfinished(repo)
        rewriteutil.precheck(repo, revs, b'fix')
    # Only consult the merge state when wdir() is part of the selection.
    haveconflicts = wdirrev in revs and list(
        mergestatemod.mergestate.read(repo).unresolved()
    )
    if haveconflicts:
        raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
    if not revs:
        raise error.Abort(
            b'no changesets specified', hint=b'use --rev or --working-dir'
        )
    return revs
440
445
441
446
def checkfixablectx(ui, repo, ctx):
    """Aborts if the revision shouldn't be replaced with a fixed one."""
    if not ctx.obsolete():
        return
    # It would be better to actually check if the revision has a successor.
    if not ui.configbool(b'experimental', b'evolution.allowdivergence'):
        raise error.Abort(b'fixing obsolete revision could cause divergence')
453
458
454
459
def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed.
    """
    # Clean/unknown files only matter when explicit patterns were given.
    listextra = bool(pats)
    paths = set()
    for basectx in basectxs:
        stat = basectx.status(
            fixctx, match=match, listclean=listextra, listunknown=listextra
        )
        paths.update(stat.added)
        paths.update(stat.modified)
        paths.update(stat.clean)
        paths.update(stat.unknown)
    return paths
475
480
476
481
477
482
def lineranges(opts, path, basepaths, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that have
    changed versus any of the base contexts, and whether the file has been
    renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get(b'whole'):
        # Return a range containing all lines. Rely on the diff
        # implementation's idea of how many lines are in the file, instead of
        # reimplementing it.
        return difflineranges(b'', content2)

    allranges = []
    for basectx in basectxs:
        # Diff against the (possibly renamed) path in the base context.
        key = (basectx.rev(), fixctx.rev(), path)
        basepath = basepaths.get(key, path)
        content1 = basectx[basepath].data() if basepath in basectx else b''
        allranges.extend(difflineranges(content1, content2))
    return unionranges(allranges)
505
510
506
511
def getbasepaths(repo, opts, workqueue, basectxs):
    """Return a map (baserev, fixrev, path) -> path of the file in the base.

    Follows renames/copies (via copies.pathcopies) between each base context
    and the context being fixed. An entry is only recorded when the resulting
    path actually exists in the base context.
    """
    if opts.get(b'whole'):
        # Base paths will never be fetched for line range determination.
        return {}

    basepaths = {}
    for rev, path in workqueue:
        fixctx = repo[rev]
        for basectx in basectxs[rev]:
            srcpath = copies.pathcopies(basectx, fixctx).get(path, path)
            if srcpath in basectx:
                basepaths[(basectx.rev(), fixctx.rev(), path)] = srcpath
    return basepaths
520
525
521
526
def unionranges(rangeslist):
    """Return the union of some closed intervals

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    merged = []
    for start, end in sorted(set(rangeslist)):
        if merged and start <= merged[-1][1] + 1:
            # Overlapping or adjacent to the previous interval: widen it.
            prevstart, prevend = merged[-1]
            merged[-1] = (prevstart, max(prevend, end))
        else:
            merged.append((start, end))
    return merged
555
560
556
561
def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    changed = []
    for lines, kind in mdiff.allblocks(content1, content2):
        first, last = lines[2], lines[3]
        # b'!' marks a changed block; ignore blocks that are empty on the
        # content2 side (pure deletions).
        if kind == b'!' and first != last:
            changed.append((first + 1, last))
    return changed
602
607
603
608
def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(scmutil.revrange(repo, opts.get(b'base')))
        if not baserevs:
            baserevs = {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        for pctx in repo[rev].parents():
            prev = pctx.rev()
            if prev in basectxs:
                # The parent is itself being fixed; inherit its bases.
                basectxs[rev].update(basectxs[prev])
            else:
                basectxs[rev].add(pctx)
    return basectxs
632
637
638 def _prefetchfiles(repo, workqueue, basepaths):
639 toprefetch = set()
640
641 # Prefetch the files that will be fixed.
642 for rev, path in workqueue:
643 if rev == wdirrev:
644 continue
645 toprefetch.add((rev, path))
646
647 # Prefetch the base contents for lineranges().
648 for (baserev, fixrev, path), basepath in basepaths.items():
649 toprefetch.add((baserev, basepath))
650
651 if toprefetch:
652 scmutil.prefetchfiles(repo, [
653 (rev, scmutil.matchfiles(repo, [path])) for rev, path in toprefetch
654 ])
655
633
656
def fixfile(ui, repo, opts, fixers, fixctx, path, basepaths, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some
    order starting with the file's content in the fixctx. Fixers that support
    line ranges will affect lines that have changed relative to any of the
    basectxs (i.e. they will only avoid lines that are common to all basectxs).

    A fixer tool's stdout will become the file's new content if and only if it
    exits with code zero. The fixer tool's working directory is the
    repository's root.
    """
    metadata = {}
    newdata = fixctx[path].data()
    for fixername, fixer in pycompat.iteritems(fixers):
        if not fixer.affects(opts, fixctx, path):
            continue
        ranges = lineranges(opts, path, basepaths, basectxs, fixctx, newdata)
        command = fixer.command(ui, path, ranges)
        if command is None:
            # The tool declined to run for these parameters.
            continue
        ui.debug(b'subprocess: %s\n' % (command,))
        proc = subprocess.Popen(
            procutil.tonativestr(command),
            shell=True,
            cwd=procutil.tonativestr(repo.root),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = proc.communicate(newdata)
        if stderr:
            showstderr(ui, fixctx.rev(), fixername, stderr)
        newerdata = stdout
        if fixer.shouldoutputmetadata():
            # The tool's stdout is JSON metadata, a NUL byte, then content.
            try:
                metadatajson, newerdata = stdout.split(b'\0', 1)
                metadata[fixername] = pycompat.json_loads(metadatajson)
            except ValueError:
                ui.warn(
                    _(b'ignored invalid output from fixer tool: %s\n')
                    % (fixername,)
                )
                continue
        else:
            metadata[fixername] = None
        if proc.returncode == 0:
            newdata = newerdata
        else:
            if not stderr:
                message = _(b'exited with status %d\n') % (proc.returncode,)
                showstderr(ui, fixctx.rev(), fixername, message)
            checktoolfailureaction(
                ui,
                _(b'no fixes will be applied'),
                hint=_(
                    b'use --config fix.failure=continue to apply any '
                    b'successful fixes anyway'
                ),
            )
    return metadata, newdata
695
718
696
719
def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each line
    of the error message. Doesn't include file names, since those take up a
    lot of space and would tend to be included in the error message if they
    were relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if not line:
            continue
        ui.warn(b'[')
        if rev is None:
            # None means the file came from the working directory.
            ui.warn(_(b'wdir'), label=b'evolve.rev')
        else:
            ui.warn(b'%d' % rev, label=b'evolve.rev')
        ui.warn(b'] %s: %s\n' % (fixername, line))
713
736
714
737
def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both the
    working directory and its parent revision. This avoids the need for a full
    update/merge, and means that the working directory simply isn't affected
    unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
    for path, data in pycompat.iteritems(filedata):
        fctx = ctx[path]
        fctx.write(data, fctx.flags())
        if repo.dirstate[path] == b'n':
            repo.dirstate.normallookup(path)

    # Swap in replacement parents, if any were produced for the current ones.
    oldparents = repo.dirstate.parents()
    newparents = [replacements.get(node, node) for node in oldparents]
    if newparents != oldparents:
        repo.setparents(*newparents)
735
758
736
759
def replacerev(ui, repo, ctx, filedata, replacements):
    """Commit a new revision like the given one, but with file content changes

    "ctx" is the original revision to be replaced by a modified one.

    "filedata" is a dict that maps paths to their new file content. All other
    paths will be recreated from the original revision without changes.
    "filedata" may contain paths that didn't exist in the original revision;
    they will be added.

    "replacements" is a dict that maps a single node to a single node, and it
    is updated to indicate the original revision is replaced by the newly
    created one. No entry is added if the replacement's node already exists.

    The new revision has the same parents as the old one, unless those parents
    have already been replaced, in which case those replacements are the
    parents of this new revision. Thus, if revisions are replaced in
    topological order, there is no need to rebase them into the original
    topology later.
    """
    p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
    p1ctx, p2ctx = repo[p1rev], repo[p2rev]
    newp1node = replacements.get(p1ctx.node(), p1ctx.node())
    newp2node = replacements.get(p2ctx.node(), p2ctx.node())

    # We don't want to create a revision that has no changes from the
    # original, but we should if the original revision's parent has been
    # replaced. Otherwise, we would produce an orphan that needs no actual
    # human intervention to evolve. We can't rely on commit() to avoid
    # creating the un-needed revision because the extra field added below
    # produces a new hash regardless of file content changes.
    nothingchanged = (
        not filedata
        and p1ctx.node() not in replacements
        and p2ctx.node() not in replacements
    )
    if nothingchanged:
        return

    extra = ctx.extra().copy()
    extra[b'fix_source'] = ctx.hex()

    # Build the replacement on top of the (possibly replaced) first parent.
    wctx = context.overlayworkingctx(repo)
    wctx.setbase(repo[newp1node])
    merge.revert_to(ctx, wc=wctx)
    copies.graftcopies(wctx, ctx, ctx.p1())

    for path in filedata.keys():
        fctx = ctx[path]
        copysource = fctx.copysource()
        wctx.write(path, filedata[path], flags=fctx.flags())
        if copysource:
            wctx.markcopied(path, copysource)

    memctx = wctx.tomemctx(
        text=ctx.description(),
        branch=ctx.branch(),
        extra=extra,
        date=ctx.date(),
        parents=(newp1node, newp2node),
        user=ctx.user(),
    )

    sucnode = memctx.commit()
    prenode = ctx.node()
    if prenode == sucnode:
        ui.debug(b'node %s already existed\n' % (ctx.hex()))
    else:
        replacements[prenode] = sucnode
805
828
806
829
def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior of
    the fixer's config suboptions. Does not validate the config values.
    """
    fixers = {}
    for name in fixernames(ui):
        # Read every suboption for this tool before deciding whether to use it.
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to let
        # it affect no files. There is no reasonable subset of files to use
        # as the default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
            continue
        if pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
            continue
        if not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
            continue
        fixers[name] = Fixer(
            command, pattern, linerange, priority, metadata, skipclean
        )
    # Highest-priority tools run first.
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )
843
866
844
867
def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    return {
        key.split(b':', 1)[0]
        for key, value in ui.configitems(b'fix')
        if b':' in key
    }
852
875
853
876
class Fixer(object):
    """Wraps the raw config values for a fixer with methods"""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        # Raw [fix] suboption values for one configured tool; see the module
        # docstring for what each suboption means.
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Should this fixer run on the file at the given path and context?"""
        root = fixctx.repo().root
        matches = matchmod.match(root, root, [self._pattern], ctx=fixctx)
        return matches(path)

    def shouldoutputmetadata(self):
        """Should the stdout of this fixer start with JSON and a null byte?"""
        return self._metadata

    def command(self, ui, path, ranges):
        """A shell command to use to invoke this fixer on the given file/lines

        May return None if there is no appropriate command to run for the given
        parameters.
        """
        expand = cmdutil.rendercommandtemplate
        filevars = {b'rootpath': path, b'basename': os.path.basename(path)}
        parts = [expand(ui, self._command, filevars)]
        if self._linerange:
            # With no dirty line ranges and :skipclean set, the tool would be
            # a no-op, so don't invoke it at all.
            if self._skipclean and not ranges:
                return None
            parts.extend(
                expand(ui, self._linerange, {b'first': first, b'last': last})
                for first, last in ranges
            )
        return b' '.join(parts)