##// END OF EJS Templates
fix: replace str() by b'%d' for formatting integer...
Martin von Zweigbergk -
r43807:b2f95f9d default
parent child Browse files
Show More
@@ -1,882 +1,882 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:pattern=set:**.cpp or **.hpp
18 clang-format:pattern=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. Any output on standard error
22 fixed file content is expected on standard output. Any output on standard error
23 will be displayed as a warning. If the exit status is not zero, the file will
23 will be displayed as a warning. If the exit status is not zero, the file will
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 status but no standard error output. Some values may be substituted into the
25 status but no standard error output. Some values may be substituted into the
26 command::
26 command::
27
27
28 {rootpath} The path of the file being fixed, relative to the repo root
28 {rootpath} The path of the file being fixed, relative to the repo root
29 {basename} The name of the file being fixed, without the directory path
29 {basename} The name of the file being fixed, without the directory path
30
30
31 If the :linerange suboption is set, the tool will only be run if there are
31 If the :linerange suboption is set, the tool will only be run if there are
32 changed lines in a file. The value of this suboption is appended to the shell
32 changed lines in a file. The value of this suboption is appended to the shell
33 command once for every range of changed lines in the file. Some values may be
33 command once for every range of changed lines in the file. Some values may be
34 substituted into the command::
34 substituted into the command::
35
35
36 {first} The 1-based line number of the first line in the modified range
36 {first} The 1-based line number of the first line in the modified range
37 {last} The 1-based line number of the last line in the modified range
37 {last} The 1-based line number of the last line in the modified range
38
38
39 Deleted sections of a file will be ignored by :linerange, because there is no
39 Deleted sections of a file will be ignored by :linerange, because there is no
40 corresponding line range in the version being fixed.
40 corresponding line range in the version being fixed.
41
41
42 By default, tools that set :linerange will only be executed if there is at least
42 By default, tools that set :linerange will only be executed if there is at least
43 one changed line range. This is meant to prevent accidents like running a code
43 one changed line range. This is meant to prevent accidents like running a code
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
46 to false.
46 to false.
47
47
48 The :pattern suboption determines which files will be passed through each
48 The :pattern suboption determines which files will be passed through each
49 configured tool. See :hg:`help patterns` for possible values. However, all
49 configured tool. See :hg:`help patterns` for possible values. However, all
50 patterns are relative to the repo root, even if that text says they are relative
50 patterns are relative to the repo root, even if that text says they are relative
51 to the current working directory. If there are file arguments to :hg:`fix`, the
51 to the current working directory. If there are file arguments to :hg:`fix`, the
52 intersection of these patterns is used.
52 intersection of these patterns is used.
53
53
54 There is also a configurable limit for the maximum size of file that will be
54 There is also a configurable limit for the maximum size of file that will be
55 processed by :hg:`fix`::
55 processed by :hg:`fix`::
56
56
57 [fix]
57 [fix]
58 maxfilesize = 2MB
58 maxfilesize = 2MB
59
59
60 Normally, execution of configured tools will continue after a failure (indicated
60 Normally, execution of configured tools will continue after a failure (indicated
61 by a non-zero exit status). It can also be configured to abort after the first
61 by a non-zero exit status). It can also be configured to abort after the first
62 such failure, so that no files will be affected if any tool fails. This abort
62 such failure, so that no files will be affected if any tool fails. This abort
63 will also cause :hg:`fix` to exit with a non-zero status::
63 will also cause :hg:`fix` to exit with a non-zero status::
64
64
65 [fix]
65 [fix]
66 failure = abort
66 failure = abort
67
67
68 When multiple tools are configured to affect a file, they execute in an order
68 When multiple tools are configured to affect a file, they execute in an order
69 defined by the :priority suboption. The priority suboption has a default value
69 defined by the :priority suboption. The priority suboption has a default value
70 of zero for each tool. Tools are executed in order of descending priority. The
70 of zero for each tool. Tools are executed in order of descending priority. The
71 execution order of tools with equal priority is unspecified. For example, you
71 execution order of tools with equal priority is unspecified. For example, you
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
73 in a text file by ensuring that 'sort' runs before 'head'::
73 in a text file by ensuring that 'sort' runs before 'head'::
74
74
75 [fix]
75 [fix]
76 sort:command = sort -n
76 sort:command = sort -n
77 head:command = head -n 10
77 head:command = head -n 10
78 sort:pattern = numbers.txt
78 sort:pattern = numbers.txt
79 head:pattern = numbers.txt
79 head:pattern = numbers.txt
80 sort:priority = 2
80 sort:priority = 2
81 head:priority = 1
81 head:priority = 1
82
82
83 To account for changes made by each tool, the line numbers used for incremental
83 To account for changes made by each tool, the line numbers used for incremental
84 formatting are recomputed before executing the next tool. So, each tool may see
84 formatting are recomputed before executing the next tool. So, each tool may see
85 different values for the arguments added by the :linerange suboption.
85 different values for the arguments added by the :linerange suboption.
86
86
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
88 content. The metadata must be placed before the file content on stdout,
88 content. The metadata must be placed before the file content on stdout,
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
91 is expected to produce this metadata encoding if and only if the :metadata
91 is expected to produce this metadata encoding if and only if the :metadata
92 suboption is true::
92 suboption is true::
93
93
94 [fix]
94 [fix]
95 tool:command = tool --prepend-json-metadata
95 tool:command = tool --prepend-json-metadata
96 tool:metadata = true
96 tool:metadata = true
97
97
98 The metadata values are passed to hooks, which can be used to print summaries or
98 The metadata values are passed to hooks, which can be used to print summaries or
99 perform other post-fixing work. The supported hooks are::
99 perform other post-fixing work. The supported hooks are::
100
100
101 "postfixfile"
101 "postfixfile"
102 Run once for each file in each revision where any fixer tools made changes
102 Run once for each file in each revision where any fixer tools made changes
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
105 tools that affected the file. Fixer tools that didn't affect the file have a
105 tools that affected the file. Fixer tools that didn't affect the file have a
106 value of None. Only fixer tools that executed are present in the metadata.
106 value of None. Only fixer tools that executed are present in the metadata.
107
107
108 "postfix"
108 "postfix"
109 Run once after all files and revisions have been handled. Provides
109 Run once after all files and revisions have been handled. Provides
110 "$HG_REPLACEMENTS" with information about what revisions were created and
110 "$HG_REPLACEMENTS" with information about what revisions were created and
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
112 files in the working copy were updated. Provides a list "$HG_METADATA"
112 files in the working copy were updated. Provides a list "$HG_METADATA"
113 mapping fixer tool names to lists of metadata values returned from
113 mapping fixer tool names to lists of metadata values returned from
114 executions that modified a file. This aggregates the same metadata
114 executions that modified a file. This aggregates the same metadata
115 previously passed to the "postfixfile" hook.
115 previously passed to the "postfixfile" hook.
116
116
117 Fixer tools are run in the repository's root directory. This allows them to read
117 Fixer tools are run in the repository's root directory. This allows them to read
118 configuration files from the working copy, or even write to the working copy.
118 configuration files from the working copy, or even write to the working copy.
119 The working copy is not updated to match the revision being fixed. In fact,
119 The working copy is not updated to match the revision being fixed. In fact,
120 several revisions may be fixed in parallel. Writes to the working copy are not
120 several revisions may be fixed in parallel. Writes to the working copy are not
121 amended into the revision being fixed; fixer tools should always write fixed
121 amended into the revision being fixed; fixer tools should always write fixed
122 file content back to stdout as documented above.
122 file content back to stdout as documented above.
123 """
123 """
124
124
125 from __future__ import absolute_import
125 from __future__ import absolute_import
126
126
127 import collections
127 import collections
128 import itertools
128 import itertools
129 import os
129 import os
130 import re
130 import re
131 import subprocess
131 import subprocess
132
132
133 from mercurial.i18n import _
133 from mercurial.i18n import _
134 from mercurial.node import nullrev
134 from mercurial.node import nullrev
135 from mercurial.node import wdirrev
135 from mercurial.node import wdirrev
136
136
137 from mercurial.utils import procutil
137 from mercurial.utils import procutil
138
138
139 from mercurial import (
139 from mercurial import (
140 cmdutil,
140 cmdutil,
141 context,
141 context,
142 copies,
142 copies,
143 error,
143 error,
144 match as matchmod,
144 match as matchmod,
145 mdiff,
145 mdiff,
146 merge,
146 merge,
147 obsolete,
147 obsolete,
148 pycompat,
148 pycompat,
149 registrar,
149 registrar,
150 scmutil,
150 scmutil,
151 util,
151 util,
152 worker,
152 worker,
153 )
153 )
154
154
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Command registration table populated by the @command decorator below.
cmdtable = {}
command = registrar.command(cmdtable)

# Config registration table populated by the configitem() calls below.
configtable = {}
configitem = registrar.configitem(configtable)

# Register the suboptions allowed for each configured fixer, and default values.
FIXER_ATTRS = {
    b'command': None,
    b'linerange': None,
    b'pattern': None,
    b'priority': 0,
    b'metadata': False,
    b'skipclean': True,
    b'enabled': True,
}

# One generic config item per suboption, e.g. "fix.<toolname>:command".
for key, default in FIXER_ATTRS.items():
    configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)

# A good default size allows most source code files to be fixed, but avoids
# letting fixer tools choke on huge inputs, which could be surprising to the
# user.
configitem(b'fix', b'maxfilesize', default=b'2MB')

# Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
# This helps users do shell scripts that stop when a fixer tool signals a
# problem.
configitem(b'fix', b'failure', default=b'continue')
190
190
191
191
def checktoolfailureaction(ui, message, hint=None):
    """Abort with 'message' if fix.failure=abort.

    Reads the ``fix.failure`` config value. ``continue`` (the default) is a
    no-op; ``abort`` raises error.Abort with the given message and hint; any
    other value is itself a configuration error and aborts with an
    explanatory hint.
    """
    action = ui.config(b'fix', b'failure')
    if action not in (b'continue', b'abort'):
        raise error.Abort(
            _(b'unknown fix.failure action: %s') % (action,),
            hint=_(b'use "continue" or "abort"'),
        )
    if action == b'abort':
        raise error.Abort(message, hint=hint)
202
202
203
203
# Flag definitions shared by the 'fix' command: (shortname, longname,
# default, help[, valuelabel]).
allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
baseopt = (
    b'',
    b'base',
    [],
    _(
        b'revisions to diff against (overrides automatic '
        b'selection, and applies to every revision being '
        b'fixed)'
    ),
    _(b'REV'),
)
revopt = (b'r', b'rev', [], _(b'revisions to fix'), _(b'REV'))
wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
usage = _(b'[OPTION]... [FILE]...')
220
220
221
221
@command(
    b'fix',
    [allopt, baseopt, revopt, wdiropt, wholeopt],
    usage,
    helpcategory=command.CATEGORY_FILE_CONTENTS,
)
def fix(ui, repo, *pats, **opts):
    """rewrite file content in changesets or working directory

    Runs any configured tools to fix the content of files. Only affects files
    with changes, unless file arguments are provided. Only affects changed lines
    of files, unless the --whole flag is used. Some tools may always affect the
    whole file regardless of --whole.

    If revisions are specified with --rev, those revisions will be checked, and
    they may be replaced with new revisions that have fixed file content. It is
    desirable to specify all descendants of each specified revision, so that the
    fixes propagate to the descendants. If all descendants are fixed at the same
    time, no merging, rebasing, or evolution will be required.

    If --working-dir is used, files with uncommitted changes in the working copy
    will be fixed. If the checked-out revision is also fixed, the working
    directory will update to the replacement revision.

    When determining what lines of each file to fix at each revision, the whole
    set of revisions being fixed is considered, so that fixes to earlier
    revisions are not forgotten in later ones. The --base flag can be used to
    override this default behavior, though it is not usually desirable to do so.
    """
    opts = pycompat.byteskwargs(opts)
    if opts[b'all']:
        # --all is shorthand for fixing every mutable revision plus the
        # working copy; it is mutually exclusive with an explicit --rev.
        if opts[b'rev']:
            raise error.Abort(_(b'cannot specify both "--rev" and "--all"'))
        opts[b'rev'] = [b'not public() and not obsolete()']
        opts[b'working_dir'] = True
    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
        revstofix = getrevstofix(ui, repo, opts)
        basectxs = getbasectxs(repo, opts, revstofix)
        workqueue, numitems = getworkqueue(
            ui, repo, pats, opts, revstofix, basectxs
        )
        fixers = getfixers(ui)

        # There are no data dependencies between the workers fixing each file
        # revision, so we can use all available parallelism.
        def getfixes(items):
            for rev, path in items:
                ctx = repo[rev]
                olddata = ctx[path].data()
                metadata, newdata = fixfile(
                    ui, repo, opts, fixers, ctx, path, basectxs[rev]
                )
                # Don't waste memory/time passing unchanged content back, but
                # produce one result per item either way.
                yield (
                    rev,
                    path,
                    metadata,
                    newdata if newdata != olddata else None,
                )

        results = worker.worker(
            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
        )

        # We have to hold on to the data for each successor revision in memory
        # until all its parents are committed. We ensure this by committing and
        # freeing memory for the revisions in some topological order. This
        # leaves a little bit of memory efficiency on the table, but also makes
        # the tests deterministic. It might also be considered a feature since
        # it makes the results more easily reproducible.
        filedata = collections.defaultdict(dict)
        aggregatemetadata = collections.defaultdict(list)
        replacements = {}
        wdirwritten = False
        commitorder = sorted(revstofix, reverse=True)
        with ui.makeprogress(
            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
        ) as progress:
            for rev, path, filerevmetadata, newdata in results:
                progress.increment(item=path)
                for fixername, fixermetadata in filerevmetadata.items():
                    aggregatemetadata[fixername].append(fixermetadata)
                if newdata is not None:
                    filedata[rev][path] = newdata
                    # The postfixfile hook fires only for files whose content
                    # was actually changed by some fixer tool.
                    hookargs = {
                        b'rev': rev,
                        b'path': path,
                        b'metadata': filerevmetadata,
                    }
                    repo.hook(
                        b'postfixfile',
                        throw=False,
                        **pycompat.strkwargs(hookargs)
                    )
                numitems[rev] -= 1
                # Apply the fixes for this and any other revisions that are
                # ready and sitting at the front of the queue. Using a loop here
                # prevents the queue from being blocked by the first revision to
                # be ready out of order.
                while commitorder and not numitems[commitorder[-1]]:
                    rev = commitorder.pop()
                    ctx = repo[rev]
                    if rev == wdirrev:
                        writeworkingdir(repo, ctx, filedata[rev], replacements)
                        wdirwritten = bool(filedata[rev])
                    else:
                        replacerev(ui, repo, ctx, filedata[rev], replacements)
                    del filedata[rev]

        cleanup(repo, replacements, wdirwritten)
        hookargs = {
            b'replacements': replacements,
            b'wdirwritten': wdirwritten,
            b'metadata': aggregatemetadata,
        }
        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
339
339
340
340
def cleanup(repo, replacements, wdirwritten):
    """Calls scmutil.cleanupnodes() with the given replacements.

    "replacements" is a dict from nodeid to nodeid, with one key and one value
    for every revision that was affected by fixing. This is slightly different
    from cleanupnodes(), which expects a list of successors per precursor, so
    each value is wrapped in a single-element list before delegating.

    "wdirwritten" is a bool which tells whether the working copy was affected by
    fixing, since it has no entry in "replacements". It is unused here.

    Useful as a hook point for extending "hg fix" with output summarizing the
    effects of the command, though we choose not to output anything here.
    """
    replacements = {
        prec: [succ] for prec, succ in pycompat.iteritems(replacements)
    }
    scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
358
358
359
359
def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
    """Constructs the list of files to be fixed at specific revisions

    (Also fixes the original quadruple-quote docstring opener, which made the
    docstring start with a stray '"' character.)

    It is up to the caller how to consume the work items, and the only
    dependence between them is that replacement revisions must be committed in
    topological order. Each work item represents a file in the working copy or
    in some revision that should be fixed and written back to the working copy
    or into a replacement revision.

    Work items for the same revision are grouped together, so that a worker
    pool starting with the first N items in parallel is likely to finish the
    first revision's work before other revisions. This can allow us to write
    the result to disk and reduce memory footprint. At time of writing, the
    partition strategy in worker.py seems favorable to this. We also sort the
    items by ascending revision number to match the order in which we commit
    the fixes later.

    Returns (workqueue, numitems) where workqueue is a list of (rev, path)
    pairs and numitems maps each revision to its number of queued items.
    """
    workqueue = []
    numitems = collections.defaultdict(int)
    maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
    for rev in sorted(revstofix):
        fixctx = repo[rev]
        match = scmutil.match(fixctx, pats, opts)
        for path in sorted(
            pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
        ):
            fctx = fixctx[path]
            # Symlinks have no fixable content; skip them silently.
            if fctx.islink():
                continue
            # Protect fixer tools from surprisingly large inputs.
            if fctx.size() > maxfilesize:
                ui.warn(
                    _(b'ignoring file larger than %s: %s\n')
                    % (util.bytecount(maxfilesize), path)
                )
                continue
            workqueue.append((rev, path))
            numitems[rev] += 1
    return workqueue, numitems
398
398
399
399
def getrevstofix(ui, repo, opts):
    """Returns the set of revision numbers that should be fixed"""
    revs = set(scmutil.revrange(repo, opts[b'rev']))
    for rev in revs:
        checkfixablectx(ui, repo, repo[rev])
    if revs:
        # Rewriting history is unsafe while another history-rewriting
        # operation is in progress, or if it would orphan descendants.
        cmdutil.checkunfinished(repo)
        checknodescendants(repo, revs)
    if opts.get(b'working_dir'):
        revs.add(wdirrev)
        if list(merge.mergestate.read(repo).unresolved()):
            raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
    if not revs:
        raise error.Abort(
            b'no changesets specified', hint=b'use --rev or --working-dir'
        )
    return revs
417
417
418
418
def checknodescendants(repo, revs):
    """Abort when fixing these revs would orphan descendants outside of revs.

    Replacing a changeset while leaving some of its descendants unfixed
    would create orphans, which we only allow when unstable changesets are
    explicitly enabled.
    """
    if obsolete.isenabled(repo, obsolete.allowunstableopt):
        return
    # Descendants of the revisions being fixed that are not themselves fixed.
    if repo.revs(b'(%ld::) - (%ld)', revs, revs):
        raise error.Abort(
            _(b'can only fix a changeset together with all its descendants')
        )
426
426
427
427
def checkfixablectx(ui, repo, ctx):
    """Aborts if the revision shouldn't be replaced with a fixed one."""
    if not ctx.mutable():
        raise error.Abort(
            b'can\'t fix immutable changeset %s'
            % (scmutil.formatchangeid(ctx),)
        )
    if ctx.obsolete():
        # It would be better to actually check if the revision has a successor.
        if not ui.configbool(
            b'experimental', b'evolution.allowdivergence'
        ):
            raise error.Abort(
                b'fixing obsolete revision could cause divergence'
            )
444
444
445
445
def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
    """Returns the set of files that should be fixed in a context

    The result depends on the base contexts; we include any file that has
    changed relative to any of the base contexts. Base contexts should be
    ancestors of the context being fixed.
    """
    files = set()
    # Clean/unknown files only matter when explicit patterns were given.
    listextra = bool(pats)
    for basectx in basectxs:
        stat = basectx.status(
            fixctx, match=match, listclean=listextra, listunknown=listextra
        )
        for group in (stat.added, stat.modified, stat.clean, stat.unknown):
            files.update(group)
    return files
466
466
467
467
def lineranges(opts, path, basectxs, fixctx, content2):
    """Returns the set of line ranges that should be fixed in a file

    Of the form [(10, 20), (30, 40)].

    This depends on the given base contexts; we must consider lines that have
    changed versus any of the base contexts, and whether the file has been
    renamed versus any of them.

    Another way to understand this is that we exclude line ranges that are
    common to the file in all base contexts.
    """
    if opts.get(b'whole'):
        # Fix every line. Diffing against empty content lets the diff
        # implementation decide how many lines the file has.
        return difflineranges(b'', content2)

    rangeslist = []
    for basectx in basectxs:
        # Follow renames so we diff against the right ancestor content.
        basepath = copies.pathcopies(basectx, fixctx).get(path, path)
        content1 = basectx[basepath].data() if basepath in basectx else b''
        rangeslist.extend(difflineranges(content1, content2))
    return unionranges(rangeslist)
494
494
495
495
def unionranges(rangeslist):
    """Return the union of some closed intervals

    >>> unionranges([])
    []
    >>> unionranges([(1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (2, 100)])
    [(1, 100)]
    >>> unionranges([(1, 99), (1, 100)])
    [(1, 100)]
    >>> unionranges([(1, 100), (40, 60)])
    [(1, 100)]
    >>> unionranges([(1, 49), (50, 100)])
    [(1, 100)]
    >>> unionranges([(1, 48), (50, 100)])
    [(1, 48), (50, 100)]
    >>> unionranges([(1, 2), (3, 4), (5, 6)])
    [(1, 6)]
    """
    merged = []
    for start, end in sorted(set(rangeslist)):
        # Adjacent closed intervals (e.g. (1,2) and (3,4)) also merge.
        if merged and start <= merged[-1][1] + 1:
            prevstart, prevend = merged[-1]
            merged[-1] = (prevstart, max(end, prevend))
        else:
            merged.append((start, end))
    return merged
529
529
530
530
def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    ranges = []
    for lines, kind in mdiff.allblocks(content1, content2):
        if kind != b'!':
            # Only changed blocks contribute ranges.
            continue
        firstline, lastline = lines[2:4]
        if firstline == lastline:
            # Empty range in content2; the lines exist only in content1.
            continue
        ranges.append((firstline + 1, lastline))
    return ranges
576
576
577
577
def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(scmutil.revrange(repo, opts.get(b'base'))) or {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        for pctx in repo[rev].parents():
            prev = pctx.rev()
            if prev in basectxs:
                # The parent is also being fixed; inherit its bases.
                basectxs[rev].update(basectxs[prev])
            else:
                basectxs[rev].add(pctx)
    return basectxs
606
606
607
607
def fixfile(ui, repo, opts, fixers, fixctx, path, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some order
    starting with the file's content in the fixctx. Fixers that support line
    ranges will affect lines that have changed relative to any of the basectxs
    (i.e. they will only avoid lines that are common to all basectxs).

    A fixer tool's stdout will become the file's new content if and only if it
    exits with code zero. The fixer tool's working directory is the repository's
    root.
    """
    metadata = {}
    newdata = fixctx[path].data()
    # Tools run in the iteration order of "fixers"; each tool receives the
    # previous tool's (possibly already fixed) output on stdin.
    for fixername, fixer in pycompat.iteritems(fixers):
        if fixer.affects(opts, fixctx, path):
            # Line ranges are recomputed against the current content, since
            # earlier tools may have shifted lines.
            ranges = lineranges(opts, path, basectxs, fixctx, newdata)
            command = fixer.command(ui, path, ranges)
            if command is None:
                # The tool declined to run (e.g. no changed lines to fix).
                continue
            ui.debug(b'subprocess: %s\n' % (command,))
            proc = subprocess.Popen(
                procutil.tonativestr(command),
                shell=True,
                cwd=procutil.tonativestr(repo.root),
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = proc.communicate(newdata)
            if stderr:
                # Surface tool diagnostics even if the tool succeeded.
                showstderr(ui, fixctx.rev(), fixername, stderr)
            newerdata = stdout
            if fixer.shouldoutputmetadata():
                try:
                    # Metadata-emitting tools prefix stdout with JSON and a
                    # NUL byte; the remainder is the fixed file content.
                    metadatajson, newerdata = stdout.split(b'\0', 1)
                    metadata[fixername] = pycompat.json_loads(metadatajson)
                except ValueError:
                    ui.warn(
                        _(b'ignored invalid output from fixer tool: %s\n')
                        % (fixername,)
                    )
                    continue
            else:
                metadata[fixername] = None
            if proc.returncode == 0:
                newdata = newerdata
            else:
                # Tool failed: keep the previous content, report the failure,
                # and let config decide whether to abort or continue.
                if not stderr:
                    message = _(b'exited with status %d\n') % (proc.returncode,)
                    showstderr(ui, fixctx.rev(), fixername, message)
                checktoolfailureaction(
                    ui,
                    _(b'no fixes will be applied'),
                    hint=_(
                        b'use --config fix.failure=continue to apply any '
                        b'successful fixes anyway'
                    ),
                )
    return metadata, newdata
668
668
669
669
def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each line of
    the error message. Doesn't include file names, since those take up a lot of
    space and would tend to be included in the error message if they were
    relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if not line:
            continue
        ui.warn(b'[')
        if rev is None:
            # The working directory has no revision number.
            ui.warn(_(b'wdir'), label=b'evolve.rev')
        else:
            ui.warn(b'%d' % rev, label=b'evolve.rev')
        ui.warn(b'] %s: %s\n' % (fixername, line))
686
686
687
687
def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both the
    working directory and its parent revision. This avoids the need for a full
    update/merge, and means that the working directory simply isn't affected
    unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
    for path, data in pycompat.iteritems(filedata):
        fctx = ctx[path]
        fctx.write(data, fctx.flags())
        if repo.dirstate[path] == b'n':
            # Force a lookup so the dirstate notices the rewritten content.
            repo.dirstate.normallookup(path)

    oldparents = repo.dirstate.parents()
    newparents = [replacements.get(node, node) for node in oldparents]
    if newparents != oldparents:
        repo.setparents(*newparents)
708
708
709
709
def replacerev(ui, repo, ctx, filedata, replacements):
    """Commit a new revision like the given one, but with file content changes

    "ctx" is the original revision to be replaced by a modified one.

    "filedata" is a dict that maps paths to their new file content. All other
    paths will be recreated from the original revision without changes.
    "filedata" may contain paths that didn't exist in the original revision;
    they will be added.

    "replacements" is a dict that maps a single node to a single node, and it is
    updated to indicate the original revision is replaced by the newly created
    one. No entry is added if the replacement's node already exists.

    The new revision has the same parents as the old one, unless those parents
    have already been replaced, in which case those replacements are the parents
    of this new revision. Thus, if revisions are replaced in topological order,
    there is no need to rebase them into the original topology later.
    """

    p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
    p1ctx, p2ctx = repo[p1rev], repo[p2rev]
    # Reparent onto already-created replacements of the parents, if any.
    newp1node = replacements.get(p1ctx.node(), p1ctx.node())
    newp2node = replacements.get(p2ctx.node(), p2ctx.node())

    # We don't want to create a revision that has no changes from the original,
    # but we should if the original revision's parent has been replaced.
    # Otherwise, we would produce an orphan that needs no actual human
    # intervention to evolve. We can't rely on commit() to avoid creating the
    # un-needed revision because the extra field added below produces a new hash
    # regardless of file content changes.
    if (
        not filedata
        and p1ctx.node() not in replacements
        and p2ctx.node() not in replacements
    ):
        return

    def filectxfn(repo, memctx, path):
        # Returns a memfilectx for "path", or None when the path does not
        # exist in the original revision. Fixed content comes from
        # "filedata"; everything else is copied from the original.
        if path not in ctx:
            return None
        fctx = ctx[path]
        copysource = fctx.copysource()
        return context.memfilectx(
            repo,
            memctx,
            path=fctx.path(),
            data=filedata.get(path, fctx.data()),
            islink=fctx.islink(),
            isexec=fctx.isexec(),
            copysource=copysource,
        )

    extra = ctx.extra().copy()
    # Record which revision this one was derived from; this also guarantees
    # the new node hashes differently from the original.
    extra[b'fix_source'] = ctx.hex()

    memctx = context.memctx(
        repo,
        parents=(newp1node, newp2node),
        text=ctx.description(),
        files=set(ctx.files()) | set(filedata.keys()),
        filectxfn=filectxfn,
        user=ctx.user(),
        date=ctx.date(),
        extra=extra,
        branch=ctx.branch(),
        editor=None,
    )
    sucnode = memctx.commit()
    prenode = ctx.node()
    if prenode == sucnode:
        # Nothing changed; don't record a self-replacement.
        ui.debug(b'node %s already existed\n' % (ctx.hex()))
    else:
        replacements[ctx.node()] = sucnode
784
784
785
785
def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior of the
    fixer's config suboptions. Does not validate the config values.
    """
    fixers = {}
    for name in fixernames(ui):
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to let it
        # affect no files. There is no reasonable subset of files to use as the
        # default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
            continue
        if pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
            continue
        if not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
            continue
        fixers[name] = Fixer(
            command, pattern, linerange, priority, metadata, skipclean
        )
    # Highest priority first; sorted() is stable for equal priorities.
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )
822
822
823
823
def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    # A suboption key looks like b'toolname:suboption'; collect the prefixes.
    return {
        key.split(b':', 1)[0]
        for key, value in ui.configitems(b'fix')
        if b':' in key
    }
831
831
832
832
class Fixer(object):
    """Wraps the raw config values for a fixer with methods"""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        # Raw [fix] suboption values; they are not validated here.
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Should this fixer run on the file at the given path and context?"""
        repo = fixctx.repo()
        matcher = matchmod.match(
            repo.root, repo.root, [self._pattern], ctx=fixctx
        )
        return matcher(path)

    def shouldoutputmetadata(self):
        """Should the stdout of this fixer start with JSON and a null byte?"""
        return self._metadata

    def command(self, ui, path, ranges):
        """A shell command to use to invoke this fixer on the given file/lines

        May return None if there is no appropriate command to run for the given
        parameters.
        """
        expand = cmdutil.rendercommandtemplate
        parts = [
            expand(
                ui,
                self._command,
                {b'rootpath': path, b'basename': os.path.basename(path)},
            )
        ]
        if self._linerange:
            if self._skipclean and not ranges:
                # No line ranges to fix, so don't run the fixer.
                return None
            parts.extend(
                expand(ui, self._linerange, {b'first': first, b'last': last})
                for first, last in ranges
            )
        return b' '.join(parts)
General Comments 0
You need to be logged in to leave comments. Login now