fix: add a monkey-patchable point after all new revisions have been committed...
Danny Hooper -
r38847:64535d43 @98 default
@@ -1,586 +1,601
1 1 # fix - rewrite file content in changesets and working copy
2 2 #
3 3 # Copyright 2018 Google LLC.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8 8
9 9 Provides a command that runs configured tools on the contents of modified files,
10 10 writing back any fixes to the working copy or replacing changesets.
11 11
12 12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 13 formatting fixes to modified lines in C++ code::
14 14
15 15 [fix]
16 16 clang-format:command=clang-format --assume-filename={rootpath}
17 17 clang-format:linerange=--lines={first}:{last}
18 18 clang-format:fileset=set:**.cpp or **.hpp
19 19
20 20 The :command suboption forms the first part of the shell command that will be
21 21 used to fix a file. The content of the file is passed on standard input, and the
22 22 fixed file content is expected on standard output. If there is any output on
23 23 standard error, the file will not be affected. Some values may be substituted
24 24 into the command::
25 25
26 26 {rootpath} The path of the file being fixed, relative to the repo root
27 27 {basename} The name of the file being fixed, without the directory path
28 28
29 29 If the :linerange suboption is set, the tool will only be run if there are
30 30 changed lines in a file. The value of this suboption is appended to the shell
31 31 command once for every range of changed lines in the file. Some values may be
32 32 substituted into the command::
33 33
34 34 {first} The 1-based line number of the first line in the modified range
35 35 {last} The 1-based line number of the last line in the modified range
36 36
37 37 The :fileset suboption determines which files will be passed through each
38 38 configured tool. See :hg:`help fileset` for possible values. If there are file
39 39 arguments to :hg:`fix`, the intersection of these filesets is used.
40 40
41 41 There is also a configurable limit for the maximum size of file that will be
42 42 processed by :hg:`fix`::
43 43
44 44 [fix]
45 45 maxfilesize=2MB
46 46
47 47 """
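# Illustrative note (not part of this patch): with the clang-format
# configuration shown in the docstring above, fixing a file "foo/bar.cpp" whose
# changed lines are 3-5 and 10, the rendered shell command would look roughly
# like:
#
#     clang-format --assume-filename=foo/bar.cpp --lines=3:5 --lines=10:10
#
# The file's content is piped to the command on stdin and the fixed content is
# read back from stdout; the :linerange suboption is expanded once per changed
# range, as described above.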
48 48
49 49 from __future__ import absolute_import
50 50
51 51 import collections
52 52 import itertools
53 53 import os
54 54 import re
55 55 import subprocess
56 56
57 57 from mercurial.i18n import _
58 58 from mercurial.node import nullrev
59 59 from mercurial.node import wdirrev
60 60
61 61 from mercurial import (
62 62 cmdutil,
63 63 context,
64 64 copies,
65 65 error,
66 66 mdiff,
67 67 merge,
68 68 obsolete,
69 69 pycompat,
70 70 registrar,
71 71 scmutil,
72 72 util,
73 73 worker,
74 74 )
75 75
76 76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 77 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 78 # be specifying the version(s) of Mercurial they are tested with, or
79 79 # leave the attribute unspecified.
80 80 testedwith = 'ships-with-hg-core'
81 81
82 82 cmdtable = {}
83 83 command = registrar.command(cmdtable)
84 84
85 85 configtable = {}
86 86 configitem = registrar.configitem(configtable)
87 87
88 88 # Register the suboptions allowed for each configured fixer.
89 89 FIXER_ATTRS = ('command', 'linerange', 'fileset')
90 90
91 91 for key in FIXER_ATTRS:
92 92 configitem('fix', '.*(:%s)?' % key, default=None, generic=True)
93 93
94 94 # A good default size allows most source code files to be fixed, but avoids
95 95 # letting fixer tools choke on huge inputs, which could be surprising to the
96 96 # user.
97 97 configitem('fix', 'maxfilesize', default='2MB')
98 98
99 99 @command('fix',
100 100 [('', 'all', False, _('fix all non-public non-obsolete revisions')),
101 101 ('', 'base', [], _('revisions to diff against (overrides automatic '
102 102 'selection, and applies to every revision being '
103 103 'fixed)'), _('REV')),
104 104 ('r', 'rev', [], _('revisions to fix'), _('REV')),
105 105 ('w', 'working-dir', False, _('fix the working directory')),
106 106 ('', 'whole', False, _('always fix every line of a file'))],
107 107 _('[OPTION]... [FILE]...'))
108 108 def fix(ui, repo, *pats, **opts):
109 109 """rewrite file content in changesets or working directory
110 110
111 111 Runs any configured tools to fix the content of files. Only affects files
112 112 with changes, unless file arguments are provided. Only affects changed lines
113 113 of files, unless the --whole flag is used. Some tools may always affect the
114 114 whole file regardless of --whole.
115 115
116 116 If revisions are specified with --rev, those revisions will be checked, and
117 117 they may be replaced with new revisions that have fixed file content. It is
118 118 desirable to specify all descendants of each specified revision, so that the
119 119 fixes propagate to the descendants. If all descendants are fixed at the same
120 120 time, no merging, rebasing, or evolution will be required.
121 121
122 122 If --working-dir is used, files with uncommitted changes in the working copy
123 123 will be fixed. If the checked-out revision is also fixed, the working
124 124 directory will update to the replacement revision.
125 125
126 126 When determining what lines of each file to fix at each revision, the whole
127 127 set of revisions being fixed is considered, so that fixes to earlier
128 128 revisions are not forgotten in later ones. The --base flag can be used to
129 129 override this default behavior, though it is not usually desirable to do so.
130 130 """
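# Illustrative invocations (assumed examples, not exhaustive):
#
#     hg fix --working-dir                  # fix uncommitted changes only
#     hg fix --rev 'draft()' --working-dir  # fix draft revisions and the working copy
#     hg fix --all     # same as --working-dir --rev 'not public() and not obsolete()'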
131 131 opts = pycompat.byteskwargs(opts)
132 132 if opts['all']:
133 133 if opts['rev']:
134 134 raise error.Abort(_('cannot specify both "--rev" and "--all"'))
135 135 opts['rev'] = ['not public() and not obsolete()']
136 136 opts['working_dir'] = True
137 137 with repo.wlock(), repo.lock(), repo.transaction('fix'):
138 138 revstofix = getrevstofix(ui, repo, opts)
139 139 basectxs = getbasectxs(repo, opts, revstofix)
140 140 workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
141 141 basectxs)
142 142 fixers = getfixers(ui)
143 143
144 144 # There are no data dependencies between the workers fixing each file
145 145 # revision, so we can use all available parallelism.
146 146 def getfixes(items):
147 147 for rev, path in items:
148 148 ctx = repo[rev]
149 149 olddata = ctx[path].data()
150 150 newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
151 151 # Don't waste memory/time passing unchanged content back, but
152 152 # produce one result per item either way.
153 153 yield (rev, path, newdata if newdata != olddata else None)
154 154 results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue)
155 155
156 156 # We have to hold on to the data for each successor revision in memory
157 157 # until all its parents are committed. We ensure this by committing and
158 158 # freeing memory for the revisions in some topological order. This
159 159 # leaves a little bit of memory efficiency on the table, but also makes
160 160 # the tests deterministic. It might also be considered a feature since
161 161 # it makes the results more easily reproducible.
162 162 filedata = collections.defaultdict(dict)
163 163 replacements = {}
164 164 commitorder = sorted(revstofix, reverse=True)
165 165 with ui.makeprogress(topic=_('fixing'), unit=_('files'),
166 166 total=sum(numitems.values())) as progress:
167 167 for rev, path, newdata in results:
168 168 progress.increment(item=path)
169 169 if newdata is not None:
170 170 filedata[rev][path] = newdata
171 171 numitems[rev] -= 1
172 172 # Apply the fixes for this and any other revisions that are
173 173 # ready and sitting at the front of the queue. Using a loop here
174 174 # prevents the queue from being blocked by the first revision to
175 175 # be ready out of order.
176 176 while commitorder and not numitems[commitorder[-1]]:
177 177 rev = commitorder.pop()
178 178 ctx = repo[rev]
179 179 if rev == wdirrev:
180 180 writeworkingdir(repo, ctx, filedata[rev], replacements)
181 181 else:
182 182 replacerev(ui, repo, ctx, filedata[rev], replacements)
183 183 del filedata[rev]
184 184
185 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
186 scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
185 cleanup(repo, replacements, bool(filedata[wdirrev]))
186
187 def cleanup(repo, replacements, wdirwritten):
188 """Calls scmutil.cleanupnodes() with the given replacements.
189
190 "replacements" is a dict from nodeid to nodeid, with one key and one value
191 for every revision that was affected by fixing. This is slightly different
192 from cleanupnodes().
193
194 "wdirwritten" is a bool which tells whether the working copy was affected by
195 fixing, since it has no entry in "replacements".
196
197 Useful as a hook point for extending "hg fix" with output summarizing the
198 effects of the command, though we choose not to output anything here.
199 """
200 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
201 scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
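# Illustrative sketch of the hook point this patch introduces (assumed example,
# not part of the change itself): another extension could wrap cleanup() to
# summarize what "hg fix" did, e.g.
#
#     from mercurial import extensions
#     from hgext import fix as fixmod
#
#     def _reportingcleanup(orig, repo, replacements, wdirwritten):
#         repo.ui.status(b'fixed %d changesets\n' % len(replacements))
#         if wdirwritten:
#             repo.ui.status(b'fixed the working copy\n')
#         return orig(repo, replacements, wdirwritten)
#
#     def extsetup(ui):
#         extensions.wrapfunction(fixmod, 'cleanup', _reportingcleanup)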
187 202
188 203 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
189 204 """Constructs the list of files to be fixed at specific revisions
190 205
191 206 It is up to the caller how to consume the work items, and the only
192 207 dependence between them is that replacement revisions must be committed in
193 208 topological order. Each work item represents a file in the working copy or
194 209 in some revision that should be fixed and written back to the working copy
195 210 or into a replacement revision.
196 211
197 212 Work items for the same revision are grouped together, so that a worker
198 213 pool starting with the first N items in parallel is likely to finish the
199 214 first revision's work before other revisions. This can allow us to write
200 215 the result to disk and reduce memory footprint. At time of writing, the
201 216 partition strategy in worker.py seems favorable to this. We also sort the
202 217 items by ascending revision number to match the order in which we commit
203 218 the fixes later.
204 219 """
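# Illustrative shape of the return values (assumed data): fixing files
# "foo.py" and "bar.py" in revision 4 and "foo.py" in the working copy would
# yield something like
#
#     workqueue == [(4, 'foo.py'), (4, 'bar.py'), (wdirrev, 'foo.py')]
#     numitems  == {4: 2, wdirrev: 1}
#
# where wdirrev is the sentinel revision number for the working directory.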
205 220 workqueue = []
206 221 numitems = collections.defaultdict(int)
207 222 maxfilesize = ui.configbytes('fix', 'maxfilesize')
208 223 for rev in sorted(revstofix):
209 224 fixctx = repo[rev]
210 225 match = scmutil.match(fixctx, pats, opts)
211 226 for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
212 227 fixctx):
213 228 if path not in fixctx:
214 229 continue
215 230 fctx = fixctx[path]
216 231 if fctx.islink():
217 232 continue
218 233 if fctx.size() > maxfilesize:
219 234 ui.warn(_('ignoring file larger than %s: %s\n') %
220 235 (util.bytecount(maxfilesize), path))
221 236 continue
222 237 workqueue.append((rev, path))
223 238 numitems[rev] += 1
224 239 return workqueue, numitems
225 240
226 241 def getrevstofix(ui, repo, opts):
227 242 """Returns the set of revision numbers that should be fixed"""
228 243 revs = set(scmutil.revrange(repo, opts['rev']))
229 244 for rev in revs:
230 245 checkfixablectx(ui, repo, repo[rev])
231 246 if revs:
232 247 cmdutil.checkunfinished(repo)
233 248 checknodescendants(repo, revs)
234 249 if opts.get('working_dir'):
235 250 revs.add(wdirrev)
236 251 if list(merge.mergestate.read(repo).unresolved()):
237 252 raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
238 253 if not revs:
239 254 raise error.Abort(
240 255 'no changesets specified', hint='use --rev or --working-dir')
241 256 return revs
242 257
243 258 def checknodescendants(repo, revs):
244 259 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
245 260 repo.revs('(%ld::) - (%ld)', revs, revs)):
246 261 raise error.Abort(_('can only fix a changeset together '
247 262 'with all its descendants'))
248 263
249 264 def checkfixablectx(ui, repo, ctx):
250 265 """Aborts if the revision shouldn't be replaced with a fixed one."""
251 266 if not ctx.mutable():
252 267 raise error.Abort('can\'t fix immutable changeset %s' %
253 268 (scmutil.formatchangeid(ctx),))
254 269 if ctx.obsolete():
255 270 # It would be better to actually check if the revision has a successor.
256 271 allowdivergence = ui.configbool('experimental',
257 272 'evolution.allowdivergence')
258 273 if not allowdivergence:
259 274 raise error.Abort('fixing obsolete revision could cause divergence')
260 275
261 276 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
262 277 """Returns the set of files that should be fixed in a context
263 278
264 279 The result depends on the base contexts; we include any file that has
265 280 changed relative to any of the base contexts. Base contexts should be
266 281 ancestors of the context being fixed.
267 282 """
268 283 files = set()
269 284 for basectx in basectxs:
270 285 stat = basectx.status(fixctx, match=match, listclean=bool(pats),
271 286 listunknown=bool(pats))
272 287 files.update(
273 288 set(itertools.chain(stat.added, stat.modified, stat.clean,
274 289 stat.unknown)))
275 290 return files
276 291
277 292 def lineranges(opts, path, basectxs, fixctx, content2):
278 293 """Returns the set of line ranges that should be fixed in a file
279 294
280 295 Of the form [(10, 20), (30, 40)].
281 296
282 297 This depends on the given base contexts; we must consider lines that have
283 298 changed versus any of the base contexts, and whether the file has been
284 299 renamed versus any of them.
285 300
286 301 Another way to understand this is that we exclude line ranges that are
287 302 common to the file in all base contexts.
288 303 """
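# Illustrative example (assumed): if the file differs from one base context on
# lines 3-5 and from another base context on lines 5-7, the result is the
# union [(3, 7)], so a fixer's :linerange would be expanded once for that
# single range.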
289 304 if opts.get('whole'):
290 305 # Return a range containing all lines. Rely on the diff implementation's
291 306 # idea of how many lines are in the file, instead of reimplementing it.
292 307 return difflineranges('', content2)
293 308
294 309 rangeslist = []
295 310 for basectx in basectxs:
296 311 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
297 312 if basepath in basectx:
298 313 content1 = basectx[basepath].data()
299 314 else:
300 315 content1 = ''
301 316 rangeslist.extend(difflineranges(content1, content2))
302 317 return unionranges(rangeslist)
303 318
304 319 def unionranges(rangeslist):
305 320 """Return the union of some closed intervals
306 321
307 322 >>> unionranges([])
308 323 []
309 324 >>> unionranges([(1, 100)])
310 325 [(1, 100)]
311 326 >>> unionranges([(1, 100), (1, 100)])
312 327 [(1, 100)]
313 328 >>> unionranges([(1, 100), (2, 100)])
314 329 [(1, 100)]
315 330 >>> unionranges([(1, 99), (1, 100)])
316 331 [(1, 100)]
317 332 >>> unionranges([(1, 100), (40, 60)])
318 333 [(1, 100)]
319 334 >>> unionranges([(1, 49), (50, 100)])
320 335 [(1, 100)]
321 336 >>> unionranges([(1, 48), (50, 100)])
322 337 [(1, 48), (50, 100)]
323 338 >>> unionranges([(1, 2), (3, 4), (5, 6)])
324 339 [(1, 6)]
325 340 """
326 341 rangeslist = sorted(set(rangeslist))
327 342 unioned = []
328 343 if rangeslist:
329 344 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
330 345 for a, b in rangeslist:
331 346 c, d = unioned[-1]
332 347 if a > d + 1:
333 348 unioned.append((a, b))
334 349 else:
335 350 unioned[-1] = (c, max(b, d))
336 351 return unioned
337 352
338 353 def difflineranges(content1, content2):
339 354 """Return list of line number ranges in content2 that differ from content1.
340 355
341 356 Line numbers are 1-based. The numbers are the first and last line contained
342 357 in the range. Single-line ranges have the same line number for the first and
343 358 last line. Excludes any empty ranges that result from lines that are only
344 359 present in content1. Relies on mdiff's idea of where the line endings are in
345 360 the string.
346 361
347 362 >>> from mercurial import pycompat
348 363 >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
349 364 >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
350 365 >>> difflineranges2(b'', b'')
351 366 []
352 367 >>> difflineranges2(b'a', b'')
353 368 []
354 369 >>> difflineranges2(b'', b'A')
355 370 [(1, 1)]
356 371 >>> difflineranges2(b'a', b'a')
357 372 []
358 373 >>> difflineranges2(b'a', b'A')
359 374 [(1, 1)]
360 375 >>> difflineranges2(b'ab', b'')
361 376 []
362 377 >>> difflineranges2(b'', b'AB')
363 378 [(1, 2)]
364 379 >>> difflineranges2(b'abc', b'ac')
365 380 []
366 381 >>> difflineranges2(b'ab', b'aCb')
367 382 [(2, 2)]
368 383 >>> difflineranges2(b'abc', b'aBc')
369 384 [(2, 2)]
370 385 >>> difflineranges2(b'ab', b'AB')
371 386 [(1, 2)]
372 387 >>> difflineranges2(b'abcde', b'aBcDe')
373 388 [(2, 2), (4, 4)]
374 389 >>> difflineranges2(b'abcde', b'aBCDe')
375 390 [(2, 4)]
376 391 """
377 392 ranges = []
378 393 for lines, kind in mdiff.allblocks(content1, content2):
379 394 firstline, lastline = lines[2:4]
380 395 if kind == '!' and firstline != lastline:
381 396 ranges.append((firstline + 1, lastline))
382 397 return ranges
383 398
384 399 def getbasectxs(repo, opts, revstofix):
385 400 """Returns a map of the base contexts for each revision
386 401
387 402 The base contexts determine which lines are considered modified when we
388 403 attempt to fix just the modified lines in a file. It also determines which
389 404 files we attempt to fix, so it is important to compute this even when
390 405 --whole is used.
391 406 """
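# Illustrative example of the default behavior (assumed): when fixing a linear
# chain P <- A <- B where A and B are both being fixed but P is not, A's base
# is {P}, and B inherits A's bases rather than using A itself, so B is also
# diffed against P. This is what lets fixes made in A influence which lines of
# B are considered modified.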
392 407 # The --base flag overrides the usual logic, and we give every revision
393 408 # exactly the set of baserevs that the user specified.
394 409 if opts.get('base'):
395 410 baserevs = set(scmutil.revrange(repo, opts.get('base')))
396 411 if not baserevs:
397 412 baserevs = {nullrev}
398 413 basectxs = {repo[rev] for rev in baserevs}
399 414 return {rev: basectxs for rev in revstofix}
400 415
401 416 # Proceed in topological order so that we can easily determine each
402 417 # revision's baserevs by looking at its parents and their baserevs.
403 418 basectxs = collections.defaultdict(set)
404 419 for rev in sorted(revstofix):
405 420 ctx = repo[rev]
406 421 for pctx in ctx.parents():
407 422 if pctx.rev() in basectxs:
408 423 basectxs[rev].update(basectxs[pctx.rev()])
409 424 else:
410 425 basectxs[rev].add(pctx)
411 426 return basectxs
412 427
413 428 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
414 429 """Run any configured fixers that should affect the file in this context
415 430
416 431 Returns the file content that results from applying the fixers in some order
417 432 starting with the file's content in the fixctx. Fixers that support line
418 433 ranges will affect lines that have changed relative to any of the basectxs
419 434 (i.e. they will only avoid lines that are common to all basectxs).
420 435 """
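# Illustrative note (assumed): each configured fixer's subprocess receives the
# current content on stdin, so the output of one fixer becomes the input of the
# next; if a fixer writes anything to stderr, its stdout is discarded and the
# previous content is carried forward unchanged.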
421 436 newdata = fixctx[path].data()
422 437 for fixername, fixer in fixers.iteritems():
423 438 if fixer.affects(opts, fixctx, path):
424 439 ranges = lineranges(opts, path, basectxs, fixctx, newdata)
425 440 command = fixer.command(ui, path, ranges)
426 441 if command is None:
427 442 continue
428 443 ui.debug('subprocess: %s\n' % (command,))
429 444 proc = subprocess.Popen(
430 445 command,
431 446 shell=True,
432 447 cwd='/',
433 448 stdin=subprocess.PIPE,
434 449 stdout=subprocess.PIPE,
435 450 stderr=subprocess.PIPE)
436 451 newerdata, stderr = proc.communicate(newdata)
437 452 if stderr:
438 453 showstderr(ui, fixctx.rev(), fixername, stderr)
439 454 else:
440 455 newdata = newerdata
441 456 return newdata
442 457
443 458 def showstderr(ui, rev, fixername, stderr):
444 459 """Writes the lines of the stderr string as warnings on the ui
445 460
446 461 Uses the revision number and fixername to give more context to each line of
447 462 the error message. Doesn't include file names, since those take up a lot of
448 463 space and would tend to be included in the error message if they were
449 464 relevant.
450 465 """
451 466 for line in re.split('[\r\n]+', stderr):
452 467 if line:
453 468 ui.warn(('['))
454 469 if rev is None:
455 470 ui.warn(_('wdir'), label='evolve.rev')
456 471 else:
457 472 ui.warn((str(rev)), label='evolve.rev')
458 473 ui.warn(('] %s: %s\n') % (fixername, line))
459 474
460 475 def writeworkingdir(repo, ctx, filedata, replacements):
461 476 """Write new content to the working copy and check out the new p1 if any
462 477
463 478 We check out a new revision if and only if we fixed something in both the
464 479 working directory and its parent revision. This avoids the need for a full
465 480 update/merge, and means that the working directory simply isn't affected
466 481 unless the --working-dir flag is given.
467 482
468 483 Directly updates the dirstate for the affected files.
469 484 """
470 485 for path, data in filedata.iteritems():
471 486 fctx = ctx[path]
472 487 fctx.write(data, fctx.flags())
473 488 if repo.dirstate[path] == 'n':
474 489 repo.dirstate.normallookup(path)
475 490
476 491 oldparentnodes = repo.dirstate.parents()
477 492 newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
478 493 if newparentnodes != oldparentnodes:
479 494 repo.setparents(*newparentnodes)
480 495
481 496 def replacerev(ui, repo, ctx, filedata, replacements):
482 497 """Commit a new revision like the given one, but with file content changes
483 498
484 499 "ctx" is the original revision to be replaced by a modified one.
485 500
486 501 "filedata" is a dict that maps paths to their new file content. All other
487 502 paths will be recreated from the original revision without changes.
488 503 "filedata" may contain paths that didn't exist in the original revision;
489 504 they will be added.
490 505
491 506 "replacements" is a dict that maps a single node to a single node, and it is
492 507 updated to indicate the original revision is replaced by the newly created
493 508 one. No entry is added if the replacement's node already exists.
494 509
495 510 The new revision has the same parents as the old one, unless those parents
496 511 have already been replaced, in which case those replacements are the parents
497 512 of this new revision. Thus, if revisions are replaced in topological order,
498 513 there is no need to rebase them into the original topology later.
499 514 """
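# Illustrative example (assumed): when fixing A <- B in topological order, A is
# replaced first, so "replacements" becomes {A: A'}; when B is then replaced,
# its parent A is remapped to A' via the lookups below, and "replacements"
# ends up as {A: A', B: B'} with no rebasing needed afterwards.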
500 515
501 516 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
502 517 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
503 518 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
504 519 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
505 520
506 521 def filectxfn(repo, memctx, path):
507 522 if path not in ctx:
508 523 return None
509 524 fctx = ctx[path]
510 525 copied = fctx.renamed()
511 526 if copied:
512 527 copied = copied[0]
513 528 return context.memfilectx(
514 529 repo,
515 530 memctx,
516 531 path=fctx.path(),
517 532 data=filedata.get(path, fctx.data()),
518 533 islink=fctx.islink(),
519 534 isexec=fctx.isexec(),
520 535 copied=copied)
521 536
522 537 memctx = context.memctx(
523 538 repo,
524 539 parents=(newp1node, newp2node),
525 540 text=ctx.description(),
526 541 files=set(ctx.files()) | set(filedata.keys()),
527 542 filectxfn=filectxfn,
528 543 user=ctx.user(),
529 544 date=ctx.date(),
530 545 extra=ctx.extra(),
531 546 branch=ctx.branch(),
532 547 editor=None)
533 548 sucnode = memctx.commit()
534 549 prenode = ctx.node()
535 550 if prenode == sucnode:
536 551 ui.debug('node %s already existed\n' % (ctx.hex()))
537 552 else:
538 553 replacements[ctx.node()] = sucnode
539 554
540 555 def getfixers(ui):
541 556 """Returns a map of configured fixer tools indexed by their names
542 557
543 558 Each value is a Fixer object with methods that implement the behavior of the
544 559 fixer's config suboptions. Does not validate the config values.
545 560 """
546 561 result = {}
547 562 for name in fixernames(ui):
548 563 result[name] = Fixer()
549 564 attrs = ui.configsuboptions('fix', name)[1]
550 565 for key in FIXER_ATTRS:
551 566 setattr(result[name], pycompat.sysstr('_' + key),
552 567 attrs.get(key, ''))
553 568 return result
554 569
555 570 def fixernames(ui):
556 571 """Returns the names of [fix] config options that have suboptions"""
557 572 names = set()
558 573 for k, v in ui.configitems('fix'):
559 574 if ':' in k:
560 575 names.add(k.split(':', 1)[0])
561 576 return names
562 577
563 578 class Fixer(object):
564 579 """Wraps the raw config values for a fixer with methods"""
565 580
566 581 def affects(self, opts, fixctx, path):
567 582 """Should this fixer run on the file at the given path and context?"""
568 583 return scmutil.match(fixctx, [self._fileset], opts)(path)
569 584
570 585 def command(self, ui, path, ranges):
571 586 """A shell command to use to invoke this fixer on the given file/lines
572 587
573 588 May return None if there is no appropriate command to run for the given
574 589 parameters.
575 590 """
576 591 expand = cmdutil.rendercommandtemplate
577 592 parts = [expand(ui, self._command,
578 593 {'rootpath': path, 'basename': os.path.basename(path)})]
579 594 if self._linerange:
580 595 if not ranges:
581 596 # No line ranges to fix, so don't run the fixer.
582 597 return None
583 598 for first, last in ranges:
584 599 parts.append(expand(ui, self._linerange,
585 600 {'first': first, 'last': last}))
586 601 return ' '.join(parts)