fix: remove unused imports...
Danny Hooper
r37224:d3f1d3e4 default
@@ -1,544 +1,541 @@
 # fix - rewrite file content in changesets and working copy
 #
 # Copyright 2018 Google LLC.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 """rewrite file content in changesets or working copy (EXPERIMENTAL)
 
 Provides a command that runs configured tools on the contents of modified files,
 writing back any fixes to the working copy or replacing changesets.
 
 Here is an example configuration that causes :hg:`fix` to apply automatic
 formatting fixes to modified lines in C++ code::
 
   [fix]
   clang-format:command=clang-format --assume-filename={rootpath}
   clang-format:linerange=--lines={first}:{last}
   clang-format:fileset=set:**.cpp or **.hpp
 
 The :command suboption forms the first part of the shell command that will be
 used to fix a file. The content of the file is passed on standard input, and the
 fixed file content is expected on standard output. If there is any output on
 standard error, the file will not be affected. Some values may be substituted
 into the command::
 
   {rootpath} The path of the file being fixed, relative to the repo root
   {basename} The name of the file being fixed, without the directory path
 
 If the :linerange suboption is set, the tool will only be run if there are
 changed lines in a file. The value of this suboption is appended to the shell
 command once for every range of changed lines in the file. Some values may be
 substituted into the command::
 
   {first} The 1-based line number of the first line in the modified range
   {last} The 1-based line number of the last line in the modified range
 
 The :fileset suboption determines which files will be passed through each
 configured tool. See :hg:`help fileset` for possible values. If there are file
 arguments to :hg:`fix`, the intersection of these filesets is used.
 
 There is also a configurable limit for the maximum size of file that will be
 processed by :hg:`fix`::
 
   [fix]
   maxfilesize=2MB
 
 """
 
 from __future__ import absolute_import
 
 import collections
 import itertools
 import os
 import re
 import subprocess
-import sys
 
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial.node import wdirrev
 
 from mercurial import (
     cmdutil,
     context,
     copies,
     error,
-    match,
     mdiff,
     merge,
     obsolete,
-    posix,
     registrar,
     scmutil,
     util,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 # Register the suboptions allowed for each configured fixer.
 FIXER_ATTRS = ('command', 'linerange', 'fileset')
 
 for key in FIXER_ATTRS:
     configitem('fix', '.*(:%s)?' % key, default=None, generic=True)
 
 # A good default size allows most source code files to be fixed, but avoids
 # letting fixer tools choke on huge inputs, which could be surprising to the
 # user.
 configitem('fix', 'maxfilesize', default='2MB')
 
 @command('fix',
     [('', 'base', [], _('revisions to diff against (overrides automatic '
                         'selection, and applies to every revision being '
                         'fixed)'), _('REV')),
      ('r', 'rev', [], _('revisions to fix'), _('REV')),
      ('w', 'working-dir', False, _('fix the working directory')),
      ('', 'whole', False, _('always fix every line of a file'))],
     _('[OPTION]... [FILE]...'))
 def fix(ui, repo, *pats, **opts):
     """rewrite file content in changesets or working directory
 
     Runs any configured tools to fix the content of files. Only affects files
     with changes, unless file arguments are provided. Only affects changed lines
     of files, unless the --whole flag is used. Some tools may always affect the
     whole file regardless of --whole.
 
     If revisions are specified with --rev, those revisions will be checked, and
     they may be replaced with new revisions that have fixed file content. It is
     desirable to specify all descendants of each specified revision, so that the
     fixes propagate to the descendants. If all descendants are fixed at the same
     time, no merging, rebasing, or evolution will be required.
 
     If --working-dir is used, files with uncommitted changes in the working copy
     will be fixed. If the checked-out revision is also fixed, the working
     directory will update to the replacement revision.
 
     When determining what lines of each file to fix at each revision, the whole
     set of revisions being fixed is considered, so that fixes to earlier
     revisions are not forgotten in later ones. The --base flag can be used to
     override this default behavior, though it is not usually desirable to do so.
     """
     with repo.wlock(), repo.lock():
         revstofix = getrevstofix(ui, repo, opts)
         basectxs = getbasectxs(repo, opts, revstofix)
         workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
                                            basectxs)
         filedata = collections.defaultdict(dict)
         replacements = {}
         fixers = getfixers(ui)
         # Some day this loop can become a worker pool, but for now it's easier
         # to fix everything serially in topological order.
         for rev, path in sorted(workqueue):
             ctx = repo[rev]
             olddata = ctx[path].data()
             newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
             if newdata != olddata:
                 filedata[rev][path] = newdata
             numitems[rev] -= 1
             if not numitems[rev]:
                 if rev == wdirrev:
                     writeworkingdir(repo, ctx, filedata[rev], replacements)
                 else:
                     replacerev(ui, repo, ctx, filedata[rev], replacements)
                 del filedata[rev]
 
         replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
         scmutil.cleanupnodes(repo, replacements, 'fix')
 
 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
     """"Constructs the list of files to be fixed at specific revisions
 
     It is up to the caller how to consume the work items, and the only
     dependence between them is that replacement revisions must be committed in
     topological order. Each work item represents a file in the working copy or
     in some revision that should be fixed and written back to the working copy
     or into a replacement revision.
     """
     workqueue = []
     numitems = collections.defaultdict(int)
     maxfilesize = ui.configbytes('fix', 'maxfilesize')
     for rev in revstofix:
         fixctx = repo[rev]
         match = scmutil.match(fixctx, pats, opts)
         for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
                                fixctx):
             if path not in fixctx:
                 continue
             fctx = fixctx[path]
             if fctx.islink():
                 continue
             if fctx.size() > maxfilesize:
                 ui.warn(_('ignoring file larger than %s: %s\n') %
                         (util.bytecount(maxfilesize), path))
                 continue
             workqueue.append((rev, path))
             numitems[rev] += 1
     return workqueue, numitems
 
 def getrevstofix(ui, repo, opts):
     """Returns the set of revision numbers that should be fixed"""
     revs = set(scmutil.revrange(repo, opts['rev']))
     for rev in revs:
         checkfixablectx(ui, repo, repo[rev])
     if revs:
         cmdutil.checkunfinished(repo)
         checknodescendants(repo, revs)
     if opts.get('working_dir'):
         revs.add(wdirrev)
         if list(merge.mergestate.read(repo).unresolved()):
             raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
     if not revs:
         raise error.Abort(
             'no changesets specified', hint='use --rev or --working-dir')
     return revs
 
 def checknodescendants(repo, revs):
     if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
         repo.revs('(%ld::) - (%ld)', revs, revs)):
         raise error.Abort(_('can only fix a changeset together '
                             'with all its descendants'))
 
 def checkfixablectx(ui, repo, ctx):
     """Aborts if the revision shouldn't be replaced with a fixed one."""
     if not ctx.mutable():
         raise error.Abort('can\'t fix immutable changeset %s' %
                           (scmutil.formatchangeid(ctx),))
     if ctx.obsolete():
         # It would be better to actually check if the revision has a successor.
         allowdivergence = ui.configbool('experimental',
                                         'evolution.allowdivergence')
         if not allowdivergence:
             raise error.Abort('fixing obsolete revision could cause divergence')
 
 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
     """Returns the set of files that should be fixed in a context
 
     The result depends on the base contexts; we include any file that has
     changed relative to any of the base contexts. Base contexts should be
     ancestors of the context being fixed.
     """
     files = set()
     for basectx in basectxs:
         stat = repo.status(
             basectx, fixctx, match=match, clean=bool(pats), unknown=bool(pats))
         files.update(
             set(itertools.chain(stat.added, stat.modified, stat.clean,
                                 stat.unknown)))
     return files
 
 def lineranges(opts, path, basectxs, fixctx, content2):
     """Returns the set of line ranges that should be fixed in a file
 
     Of the form [(10, 20), (30, 40)].
 
     This depends on the given base contexts; we must consider lines that have
     changed versus any of the base contexts, and whether the file has been
     renamed versus any of them.
 
     Another way to understand this is that we exclude line ranges that are
     common to the file in all base contexts.
     """
     if opts.get('whole'):
         # Return a range containing all lines. Rely on the diff implementation's
         # idea of how many lines are in the file, instead of reimplementing it.
         return difflineranges('', content2)
 
     rangeslist = []
     for basectx in basectxs:
         basepath = copies.pathcopies(basectx, fixctx).get(path, path)
         if basepath in basectx:
             content1 = basectx[basepath].data()
         else:
             content1 = ''
         rangeslist.extend(difflineranges(content1, content2))
     return unionranges(rangeslist)
 
 def unionranges(rangeslist):
     """Return the union of some closed intervals
 
     >>> unionranges([])
     []
     >>> unionranges([(1, 100)])
     [(1, 100)]
     >>> unionranges([(1, 100), (1, 100)])
     [(1, 100)]
     >>> unionranges([(1, 100), (2, 100)])
     [(1, 100)]
     >>> unionranges([(1, 99), (1, 100)])
     [(1, 100)]
     >>> unionranges([(1, 100), (40, 60)])
     [(1, 100)]
     >>> unionranges([(1, 49), (50, 100)])
     [(1, 100)]
     >>> unionranges([(1, 48), (50, 100)])
     [(1, 48), (50, 100)]
     >>> unionranges([(1, 2), (3, 4), (5, 6)])
     [(1, 6)]
     """
     rangeslist = sorted(set(rangeslist))
     unioned = []
     if rangeslist:
         unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
     for a, b in rangeslist:
         c, d = unioned[-1]
         if a > d + 1:
             unioned.append((a, b))
         else:
             unioned[-1] = (c, max(b, d))
     return unioned
 
 def difflineranges(content1, content2):
     """Return list of line number ranges in content2 that differ from content1.
 
     Line numbers are 1-based. The numbers are the first and last line contained
     in the range. Single-line ranges have the same line number for the first and
     last line. Excludes any empty ranges that result from lines that are only
     present in content1. Relies on mdiff's idea of where the line endings are in
     the string.
 
     >>> lines = lambda s: '\\n'.join([c for c in s])
     >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
     >>> difflineranges2('', '')
     []
     >>> difflineranges2('a', '')
     []
     >>> difflineranges2('', 'A')
     [(1, 1)]
     >>> difflineranges2('a', 'a')
     []
     >>> difflineranges2('a', 'A')
     [(1, 1)]
     >>> difflineranges2('ab', '')
     []
     >>> difflineranges2('', 'AB')
     [(1, 2)]
     >>> difflineranges2('abc', 'ac')
     []
     >>> difflineranges2('ab', 'aCb')
     [(2, 2)]
     >>> difflineranges2('abc', 'aBc')
     [(2, 2)]
     >>> difflineranges2('ab', 'AB')
     [(1, 2)]
     >>> difflineranges2('abcde', 'aBcDe')
     [(2, 2), (4, 4)]
     >>> difflineranges2('abcde', 'aBCDe')
     [(2, 4)]
     """
     ranges = []
     for lines, kind in mdiff.allblocks(content1, content2):
         firstline, lastline = lines[2:4]
         if kind == '!' and firstline != lastline:
             ranges.append((firstline + 1, lastline))
     return ranges
 
 def getbasectxs(repo, opts, revstofix):
     """Returns a map of the base contexts for each revision
 
     The base contexts determine which lines are considered modified when we
     attempt to fix just the modified lines in a file.
     """
     # The --base flag overrides the usual logic, and we give every revision
     # exactly the set of baserevs that the user specified.
     if opts.get('base'):
         baserevs = set(scmutil.revrange(repo, opts.get('base')))
         if not baserevs:
             baserevs = {nullrev}
         basectxs = {repo[rev] for rev in baserevs}
         return {rev: basectxs for rev in revstofix}
 
     # Proceed in topological order so that we can easily determine each
     # revision's baserevs by looking at its parents and their baserevs.
     basectxs = collections.defaultdict(set)
     for rev in sorted(revstofix):
         ctx = repo[rev]
         for pctx in ctx.parents():
             if pctx.rev() in basectxs:
                 basectxs[rev].update(basectxs[pctx.rev()])
             else:
                 basectxs[rev].add(pctx)
     return basectxs
 
 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
     """Run any configured fixers that should affect the file in this context
 
     Returns the file content that results from applying the fixers in some order
     starting with the file's content in the fixctx. Fixers that support line
     ranges will affect lines that have changed relative to any of the basectxs
     (i.e. they will only avoid lines that are common to all basectxs).
     """
     newdata = fixctx[path].data()
     for fixername, fixer in fixers.iteritems():
         if fixer.affects(opts, fixctx, path):
             ranges = lineranges(opts, path, basectxs, fixctx, newdata)
             command = fixer.command(path, ranges)
             if command is None:
                 continue
             ui.debug('subprocess: %s\n' % (command,))
             proc = subprocess.Popen(
                 command,
                 shell=True,
                 cwd='/',
                 stdin=subprocess.PIPE,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE)
             newerdata, stderr = proc.communicate(newdata)
             if stderr:
                 showstderr(ui, fixctx.rev(), fixername, stderr)
             else:
                 newdata = newerdata
     return newdata
 
 def showstderr(ui, rev, fixername, stderr):
     """Writes the lines of the stderr string as warnings on the ui
 
     Uses the revision number and fixername to give more context to each line of
     the error message. Doesn't include file names, since those take up a lot of
     space and would tend to be included in the error message if they were
     relevant.
     """
     for line in re.split('[\r\n]+', stderr):
         if line:
             ui.warn(('['))
             if rev is None:
                 ui.warn(_('wdir'), label='evolve.rev')
             else:
                 ui.warn((str(rev)), label='evolve.rev')
             ui.warn(('] %s: %s\n') % (fixername, line))
 
 def writeworkingdir(repo, ctx, filedata, replacements):
     """Write new content to the working copy and check out the new p1 if any
 
     We check out a new revision if and only if we fixed something in both the
     working directory and its parent revision. This avoids the need for a full
     update/merge, and means that the working directory simply isn't affected
     unless the --working-dir flag is given.
 
     Directly updates the dirstate for the affected files.
     """
     for path, data in filedata.iteritems():
         fctx = ctx[path]
         fctx.write(data, fctx.flags())
         if repo.dirstate[path] == 'n':
             repo.dirstate.normallookup(path)
 
     oldparentnodes = repo.dirstate.parents()
     newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
     if newparentnodes != oldparentnodes:
         repo.setparents(*newparentnodes)
 
 def replacerev(ui, repo, ctx, filedata, replacements):
     """Commit a new revision like the given one, but with file content changes
 
     "ctx" is the original revision to be replaced by a modified one.
 
     "filedata" is a dict that maps paths to their new file content. All other
     paths will be recreated from the original revision without changes.
     "filedata" may contain paths that didn't exist in the original revision;
     they will be added.
 
     "replacements" is a dict that maps a single node to a single node, and it is
     updated to indicate the original revision is replaced by the newly created
     one. No entry is added if the replacement's node already exists.
 
     The new revision has the same parents as the old one, unless those parents
     have already been replaced, in which case those replacements are the parents
     of this new revision. Thus, if revisions are replaced in topological order,
     there is no need to rebase them into the original topology later.
     """
 
     p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
     p1ctx, p2ctx = repo[p1rev], repo[p2rev]
     newp1node = replacements.get(p1ctx.node(), p1ctx.node())
     newp2node = replacements.get(p2ctx.node(), p2ctx.node())
 
     def filectxfn(repo, memctx, path):
         if path not in ctx:
             return None
         fctx = ctx[path]
         copied = fctx.renamed()
         if copied:
             copied = copied[0]
         return context.memfilectx(
             repo,
             memctx,
             path=fctx.path(),
             data=filedata.get(path, fctx.data()),
             islink=fctx.islink(),
             isexec=fctx.isexec(),
             copied=copied)
 
     overrides = {('phases', 'new-commit'): ctx.phase()}
     with ui.configoverride(overrides, source='fix'):
         memctx = context.memctx(
             repo,
             parents=(newp1node, newp2node),
             text=ctx.description(),
             files=set(ctx.files()) | set(filedata.keys()),
             filectxfn=filectxfn,
             user=ctx.user(),
             date=ctx.date(),
             extra=ctx.extra(),
             branch=ctx.branch(),
             editor=None)
         sucnode = memctx.commit()
         prenode = ctx.node()
         if prenode == sucnode:
             ui.debug('node %s already existed\n' % (ctx.hex()))
         else:
             replacements[ctx.node()] = sucnode
 
 def getfixers(ui):
     """Returns a map of configured fixer tools indexed by their names
 
     Each value is a Fixer object with methods that implement the behavior of the
     fixer's config suboptions. Does not validate the config values.
     """
     result = {}
     for name in fixernames(ui):
         result[name] = Fixer()
         attrs = ui.configsuboptions('fix', name)[1]
         for key in FIXER_ATTRS:
             setattr(result[name], '_' + key, attrs.get(key, ''))
     return result
 
 def fixernames(ui):
     """Returns the names of [fix] config options that have suboptions"""
     names = set()
     for k, v in ui.configitems('fix'):
         if ':' in k:
             names.add(k.split(':', 1)[0])
     return names
 
 class Fixer(object):
     """Wraps the raw config values for a fixer with methods"""
 
     def affects(self, opts, fixctx, path):
         """Should this fixer run on the file at the given path and context?"""
         return scmutil.match(fixctx, [self._fileset], opts)(path)
 
     def command(self, path, ranges):
         """A shell command to use to invoke this fixer on the given file/lines
 
         May return None if there is no appropriate command to run for the given
         parameters.
         """
         parts = [self._command.format(rootpath=path,
                                       basename=os.path.basename(path))]
         if self._linerange:
             if not ranges:
                 # No line ranges to fix, so don't run the fixer.
                 return None
             for first, last in ranges:
                 parts.append(self._linerange.format(first=first, last=last))
         return ' '.join(parts)
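
For illustration only, and not part of the changeset: a minimal sketch of how the Fixer.command method shown above assembles a shell command from the clang-format configuration in the module docstring. It assumes the Fixer class defined in the file above; the attribute values mirror what getfixers() would set from the :command and :linerange suboptions, while the file path and line ranges are hypothetical.

    # Hypothetical example: build a Fixer like getfixers() would for the
    # docstring's clang-format configuration, then format a command for
    # made-up changed-line ranges in a made-up file.
    fixer = Fixer()
    fixer._command = 'clang-format --assume-filename={rootpath}'  # clang-format:command
    fixer._linerange = '--lines={first}:{last}'                   # clang-format:linerange
    fixer._fileset = 'set:**.cpp or **.hpp'                       # clang-format:fileset

    # 'src/main.cpp' and the ranges are invented for this sketch.
    print(fixer.command('src/main.cpp', [(10, 20), (30, 40)]))
    # clang-format --assume-filename=src/main.cpp --lines=10:20 --lines=30:40

The :fileset suboption is not used here; it is consulted by Fixer.affects() to decide whether the tool runs on a given file at all.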