cleanup: use set literals where possible...
Martin von Zweigbergk
r42224:566daffc default
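
Every hunk below applies the same mechanical change: a set() call wrapping a list or list comprehension becomes a set literal or set comprehension, so no intermediate list is built. A minimal sketch of the pattern, using illustrative sample data that is not taken from the Mercurial sources:

    # Illustrative data only; not from the Mercurial sources.
    patches = ['first.diff', 'second.diff', 'first.diff']

    # Old spelling: a list comprehension is built, then copied into a set.
    applied_old = set([p for p in patches])
    empty_old = set([])

    # New spelling: a set comprehension builds the set directly, and the
    # empty set stays set() because {} is an empty dict, not an empty set.
    applied_new = {p for p in patches}
    empty_new = set()

    assert applied_old == applied_new
    assert empty_old == empty_new
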
@@ -198,9 +198,9 @@ def fastannotate(ui, repo, *pats, **opts
         formatter.write(result, lines, existinglines=existinglines)
     formatter.end()
 
-_newopts = set([])
-_knownopts = set([opt[1].replace('-', '_') for opt in
-                  (fastannotatecommandargs[r'options'] + commands.globalopts)])
+_newopts = set()
+_knownopts = {opt[1].replace('-', '_') for opt in
+              (fastannotatecommandargs[r'options'] + commands.globalopts)}
 
 def _annotatewrapper(orig, ui, repo, *pats, **opts):
     """used by wrapdefault"""
@@ -136,7 +136,7 @@ class basestore(object):
         failed = self._verifyfiles(contents, filestocheck)
 
         numrevs = len(verified)
-        numlfiles = len(set([fname for (fname, fnode) in verified]))
+        numlfiles = len({fname for (fname, fnode) in verified})
         if contents:
             self.ui.status(
                 _('verified contents of %d revisions of %d largefiles\n')
@@ -1940,7 +1940,7 @@ class queue(object):
             self.ui.write(patchname, label='qseries.' + state)
             self.ui.write('\n')
 
-        applied = set([p.name for p in self.applied])
+        applied = {p.name for p in self.applied}
         if length is None:
             length = len(self.series) - start
         if not missing:
@@ -3658,7 +3658,7 @@ def revsetmq(repo, subset, x):
     """Changesets managed by MQ.
     """
     revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
-    applied = set([repo[r.node].rev() for r in repo.mq.applied])
+    applied = {repo[r.node].rev() for r in repo.mq.applied}
     return smartset.baseset([r for r in subset if r in applied])
 
 # tell hggettext to extract docstrings from these functions:
@@ -1878,8 +1878,8 @@ def _computeobsoletenotrebased(repo, reb
     obsolete successors.
     """
     obsoletenotrebased = {}
-    obsoletewithoutsuccessorindestination = set([])
-    obsoleteextinctsuccessors = set([])
+    obsoletewithoutsuccessorindestination = set()
+    obsoleteextinctsuccessors = set()
 
     assert repo.filtername is None
     cl = repo.changelog
@@ -804,7 +804,7 @@ def gcclient(ui, cachepath):
         return
 
     reposfile = open(repospath, 'rb')
-    repos = set([r[:-1] for r in reposfile.readlines()])
+    repos = {r[:-1] for r in reposfile.readlines()}
     reposfile.close()
 
     # build list of useful files
@@ -154,9 +154,9 @@ def _deletebigpacks(repo, folder, files)
 
     # Either an oversize index or datapack will trigger cleanup of the whole
     # pack:
-    oversized = set([os.path.splitext(path)[0] for path, ftype, stat in files
-        if (stat.st_size > maxsize and (os.path.splitext(path)[1]
-            in VALIDEXTS))])
+    oversized = {os.path.splitext(path)[0] for path, ftype, stat in files
+        if (stat.st_size > maxsize and (os.path.splitext(path)[1]
+            in VALIDEXTS))}
 
     for rootfname in oversized:
         rootpath = os.path.join(folder, rootfname)
@@ -243,7 +243,7 @@ def showstack(ui, repo, displayer):
     else:
         newheads = set()
 
-    allrevs = set(stackrevs) | newheads | set([baserev])
+    allrevs = set(stackrevs) | newheads | {baserev}
     nodelen = longestshortest(repo, allrevs)
 
     try:
@@ -147,7 +147,7 @@ def uncommit(ui, repo, *pats, **opts):
             # if not everything tracked in that directory can be
             # uncommitted.
             if badfiles:
-                badfiles -= set([f for f in util.dirs(eligible)])
+                badfiles -= {f for f in util.dirs(eligible)}
 
             for f in sorted(badfiles):
                 if f in s.clean:
@@ -2315,7 +2315,7 @@ def widen_bundle(repo, oldmatcher, newma
                                     oldmatcher=oldmatcher,
                                     matcher=newmatcher,
                                     fullnodes=commonnodes)
-    cgdata = packer.generate(set([nodemod.nullid]), list(commonnodes),
+    cgdata = packer.generate({nodemod.nullid}, list(commonnodes),
                              False, 'narrow_widen', changelog=False)
 
     part = bundler.newpart('changegroup', data=cgdata)
@@ -458,7 +458,7 @@ class dirnode(object):
 
     def __init__(self, dirpath):
         self.path = dirpath
-        self.statuses = set([])
+        self.statuses = set()
         self.files = []
         self.subdirs = {}
 
@@ -2495,8 +2495,7 @@ def amend(ui, repo, old, extra, pats, op
         if len(old.parents()) > 1:
             # ctx.files() isn't reliable for merges, so fall back to the
             # slower repo.status() method
-            files = set([fn for st in base.status(old)[:3]
-                         for fn in st])
+            files = {fn for st in base.status(old)[:3] for fn in st}
         else:
             files = set(old.files())
 
@@ -764,7 +764,7 @@ def headrevs(revs, parentsfn):
     the input set.
     """
     headrevs = set(revs)
-    parents = set([node.nullrev])
+    parents = {node.nullrev}
     up = parents.update
 
     for rev in revs:
@@ -707,8 +707,8 @@ def _pushdiscoverybookmarks(pushop):
 
     remotebookmark = listkeys(remote, 'bookmarks')
 
-    explicit = set([repo._bookmarks.expandname(bookmark)
-                    for bookmark in pushop.bookmarks])
+    explicit = {repo._bookmarks.expandname(bookmark)
+                for bookmark in pushop.bookmarks}
 
     remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
     comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
@@ -34,7 +34,7 @@ def bisect(repo, state):
 
     changelog = repo.changelog
     clparents = changelog.parentrevs
-    skip = set([changelog.rev(n) for n in state['skip']])
+    skip = {changelog.rev(n) for n in state['skip']}
 
     def buildancestors(bad, good):
         badrev = min([changelog.rev(n) for n in bad])
@@ -748,7 +748,7 @@ def help_(ui, commands, name, unknowncmd
             ct = mod.cmdtable
         except AttributeError:
             ct = {}
-        modcmds = set([c.partition('|')[0] for c in ct])
+        modcmds = {c.partition('|')[0] for c in ct}
         rst.extend(helplist(modcmds.__contains__))
     else:
         rst.append(_("(use 'hg help extensions' for information on enabling"
@@ -815,8 +815,8 @@ def _checkunknownfiles(repo, wctx, mctx,
                     fileconflicts.add(f)
 
         allconflicts = fileconflicts | pathconflicts
-        ignoredconflicts = set([c for c in allconflicts
-                                if repo.dirstate._ignore(c)])
+        ignoredconflicts = {c for c in allconflicts
+                            if repo.dirstate._ignore(c)}
         unknownconflicts = allconflicts - ignoredconflicts
         collectconflicts(ignoredconflicts, ignoredconfig)
         collectconflicts(unknownconflicts, unknownconfig)
@@ -1104,7 +1104,7 @@ def _filternarrowactions(narrowmatch, br
     Raise an exception if the merge cannot be completed because the repo is
     narrowed.
     """
-    nooptypes = set(['k']) # TODO: handle with nonconflicttypes
+    nooptypes = {'k'} # TODO: handle with nonconflicttypes
     nonconflicttypes = set('a am c cm f g r e'.split())
     # We mutate the items in the dict during iteration, so iterate
     # over a copy.
@@ -743,7 +743,7 @@ class obsstore(object):
             pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
             direct.update(pruned)
             direct -= seenmarkers
-            pendingnodes = set([m[0] for m in direct])
+            pendingnodes = {m[0] for m in direct}
             seenmarkers |= direct
             pendingnodes -= seennodes
             seennodes |= pendingnodes
@@ -637,7 +637,7 @@ def _candidategroups(revlog, textlen, p1
 
     deltas_limit = textlen * LIMIT_DELTA2TEXT
 
-    tested = set([nullrev])
+    tested = {nullrev}
     candidates = _refinedgroups(revlog, p1, p2, cachedelta)
     while True:
         temptative = candidates.send(good)
@@ -345,7 +345,7 @@ def find_pullbundle(repo, proto, opts, c
     one specific branch of many.
     """
     def decodehexstring(s):
-        return set([binascii.unhexlify(h) for h in s.split(';')])
+        return {binascii.unhexlify(h) for h in s.split(';')}
 
     manifest = repo.vfs.tryread('pullbundles.manifest')
     if not manifest:
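
Beyond readability, the comprehension form skips the throwaway list that set([...]) has to materialize before the set is built. A rough way to observe the difference on your own machine; the input size, repeat count, and printed numbers are illustrative only and not part of the commit:

    import timeit

    setup = "data = list(range(1000))"
    old = timeit.timeit("set([x * 2 for x in data])", setup=setup, number=10000)
    new = timeit.timeit("{x * 2 for x in data}", setup=setup, number=10000)
    print("set([listcomp]): %.3fs   set comprehension: %.3fs" % (old, new))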