@@ -198,9 +198,9 @@ def fastannotate(ui, repo, *pats, **opts
     formatter.write(result, lines, existinglines=existinglines)
     formatter.end()

-_newopts = set([])
-_knownopts = set([opt[1].replace('-', '_') for opt in
-                  (fastannotatecommandargs[r'options'] + commands.globalopts)])
+_newopts = set()
+_knownopts = {opt[1].replace('-', '_') for opt in
+              (fastannotatecommandargs[r'options'] + commands.globalopts)}

 def _annotatewrapper(orig, ui, repo, *pats, **opts):
     """used by wrapdefault"""
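Every hunk in this change applies the same mechanical rewrite: a list comprehension wrapped in set() becomes a set comprehension, and set([...]) around explicit elements becomes a set literal. A minimal, self-contained sketch of the idiom (the names here are made up for illustration, not taken from the patch):

    opts = ['dry-run', 'verbose', 'dry-run']

    # Old spelling: the list comprehension builds a throwaway list,
    # which set() then copies element by element.
    old_style = set([o.replace('-', '_') for o in opts])

    # New spelling: the set comprehension fills the set directly.
    new_style = {o.replace('-', '_') for o in opts}

    assert old_style == new_style == {'dry_run', 'verbose'}

Both spellings produce equal sets; the comprehension form simply avoids the intermediate list and reads more directly.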
@@ -136,7 +136,7 @@ class basestore(object):
         failed = self._verifyfiles(contents, filestocheck)

         numrevs = len(verified)
-        numlfiles = len(set([fname for (fname, fnode) in verified]))
+        numlfiles = len({fname for (fname, fnode) in verified})
         if contents:
             self.ui.status(
                 _('verified contents of %d revisions of %d largefiles\n')
@@ -1940,7 +1940,7 @@ class queue(object):
            self.ui.write(patchname, label='qseries.' + state)
            self.ui.write('\n')

-        applied = set([p.name for p in self.applied])
+        applied = {p.name for p in self.applied}
         if length is None:
             length = len(self.series) - start
         if not missing:
@@ -3658,7 +3658,7 @@ def revsetmq(repo, subset, x):
     """Changesets managed by MQ.
     """
     revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
-    applied = set([repo[r.node].rev() for r in repo.mq.applied])
+    applied = {repo[r.node].rev() for r in repo.mq.applied}
     return smartset.baseset([r for r in subset if r in applied])

 # tell hggettext to extract docstrings from these functions:
@@ -1878,8 +1878,8 @@ def _computeobsoletenotrebased(repo, reb
     obsolete successors.
     """
     obsoletenotrebased = {}
-    obsoletewithoutsuccessorindestination = set([])
-    obsoleteextinctsuccessors = set([])
+    obsoletewithoutsuccessorindestination = set()
+    obsoleteextinctsuccessors = set()

     assert repo.filtername is None
     cl = repo.changelog
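The empty-set assignments above are the one case that cannot become a brace literal, because {} denotes an empty dict in Python; set([]) therefore simplifies to set(). A quick sketch (not taken from the patch):

    empty = set()
    assert empty == set([])        # same value, without the throwaway list argument
    assert isinstance({}, dict)    # {} is a dict, not a set

    nonempty = {'k'}
    assert nonempty == set(['k'])  # non-empty sets do have a literal form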
@@ -804,7 +804,7 @@ def gcclient(ui, cachepath):
         return

     reposfile = open(repospath, 'rb')
-    repos = set([r[:-1] for r in reposfile.readlines()])
+    repos = {r[:-1] for r in reposfile.readlines()}
     reposfile.close()

     # build list of useful files
@@ -154,9 +154,9 @@ def _deletebigpacks(repo, folder, files)

     # Either an oversize index or datapack will trigger cleanup of the whole
     # pack:
-    oversized = set([os.path.splitext(path)[0] for path, ftype, stat in files
+    oversized = {os.path.splitext(path)[0] for path, ftype, stat in files
                  if (stat.st_size > maxsize and (os.path.splitext(path)[1]
-                      in VALIDEXTS))])
+                      in VALIDEXTS))}

     for rootfname in oversized:
         rootpath = os.path.join(folder, rootfname)
@@ -243,7 +243,7 @@ def showstack(ui, repo, displayer):
     else:
         newheads = set()

-    allrevs = set(stackrevs) | newheads | set([baserev])
+    allrevs = set(stackrevs) | newheads | {baserev}
     nodelen = longestshortest(repo, allrevs)

     try:
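In the showstack hunk the singleton set([baserev]) becomes the literal {baserev}, which composes naturally with set union. An illustrative sketch with made-up values:

    stackrevs, newheads, baserev = [5, 6, 7], {9}, 4
    allrevs = set(stackrevs) | newheads | {baserev}
    assert allrevs == {4, 5, 6, 7, 9}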
@@ -147,7 +147,7 @@ def uncommit(ui, repo, *pats, **opts):
         # if not everything tracked in that directory can be
         # uncommitted.
         if badfiles:
-            badfiles -= set([f for f in util.dirs(eligible)])
+            badfiles -= {f for f in util.dirs(eligible)}

         for f in sorted(badfiles):
             if f in s.clean:
@@ -2315,7 +2315,7 @@ def widen_bundle(repo, oldmatcher, newma
                                         oldmatcher=oldmatcher,
                                         matcher=newmatcher,
                                         fullnodes=commonnodes)
-    cgdata = packer.generate(set([nodemod.nullid]), list(commonnodes),
+    cgdata = packer.generate({nodemod.nullid}, list(commonnodes),
                              False, 'narrow_widen', changelog=False)

     part = bundler.newpart('changegroup', data=cgdata)
@@ -458,7 +458,7 @@ class dirnode(object):

     def __init__(self, dirpath):
         self.path = dirpath
-        self.statuses = set([])
+        self.statuses = set()
         self.files = []
         self.subdirs = {}

@@ -2495,8 +2495,7 @@ def amend(ui, repo, old, extra, pats, op
         if len(old.parents()) > 1:
             # ctx.files() isn't reliable for merges, so fall back to the
             # slower repo.status() method
-            files = set([fn for st in base.status(old)[:3]
-                         for fn in st])
+            files = {fn for st in base.status(old)[:3] for fn in st}
         else:
             files = set(old.files())

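The amend hunk also folds a two-line comprehension into one, and it uses two for clauses to flatten the per-status file lists into a single set. A small sketch of that flattening, with hypothetical data:

    groups = [['a', 'b'], ['b', 'c'], []]
    flattened = {name for group in groups for name in group}
    assert flattened == {'a', 'b', 'c'}  # duplicates collapse, empty groups vanish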
@@ -764,7 +764,7 @@ def headrevs(revs, parentsfn):
     the input set.
     """
     headrevs = set(revs)
-    parents = set([node.nullrev])
+    parents = {node.nullrev}
     up = parents.update

     for rev in revs:
@@ -707,8 +707,8 @@ def _pushdiscoverybookmarks(pushop):

     remotebookmark = listkeys(remote, 'bookmarks')

-    explicit = set([repo._bookmarks.expandname(bookmark)
-                    for bookmark in pushop.bookmarks])
+    explicit = {repo._bookmarks.expandname(bookmark)
+                for bookmark in pushop.bookmarks}

     remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
     comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
@@ -34,7 +34,7 @@ def bisect(repo, state):

     changelog = repo.changelog
     clparents = changelog.parentrevs
-    skip = set([changelog.rev(n) for n in state['skip']])
+    skip = {changelog.rev(n) for n in state['skip']}

     def buildancestors(bad, good):
         badrev = min([changelog.rev(n) for n in bad])
@@ -748,7 +748,7 @@ def help_(ui, commands, name, unknowncmd
             ct = mod.cmdtable
         except AttributeError:
             ct = {}
-        modcmds = set([c.partition('|')[0] for c in ct])
+        modcmds = {c.partition('|')[0] for c in ct}
         rst.extend(helplist(modcmds.__contains__))
     else:
         rst.append(_("(use 'hg help extensions' for information on enabling"
@@ -815,8 +815,8 @@ def _checkunknownfiles(repo, wctx, mctx,
                     fileconflicts.add(f)

         allconflicts = fileconflicts | pathconflicts
-        ignoredconflicts = set([c for c in allconflicts
-                                if repo.dirstate._ignore(c)])
+        ignoredconflicts = {c for c in allconflicts
+                            if repo.dirstate._ignore(c)}
         unknownconflicts = allconflicts - ignoredconflicts
         collectconflicts(ignoredconflicts, ignoredconfig)
         collectconflicts(unknownconflicts, unknownconfig)
@@ -1104,7 +1104,7 @@ def _filternarrowactions(narrowmatch, br
     Raise an exception if the merge cannot be completed because the repo is
     narrowed.
     """
-    nooptypes = set(['k']) # TODO: handle with nonconflicttypes
+    nooptypes = {'k'} # TODO: handle with nonconflicttypes
     nonconflicttypes = set('a am c cm f g r e'.split())
     # We mutate the items in the dict during iteration, so iterate
     # over a copy.
@@ -743,7 +743,7 @@ class obsstore(object):
         pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
         direct.update(pruned)
         direct -= seenmarkers
-        pendingnodes = set([m[0] for m in direct])
+        pendingnodes = {m[0] for m in direct}
         seenmarkers |= direct
         pendingnodes -= seennodes
         seennodes |= pendingnodes
@@ -637,7 +637,7 @@ def _candidategroups(revlog, textlen, p1

     deltas_limit = textlen * LIMIT_DELTA2TEXT

-    tested = set([nullrev])
+    tested = {nullrev}
     candidates = _refinedgroups(revlog, p1, p2, cachedelta)
     while True:
         temptative = candidates.send(good)
@@ -345,7 +345,7 @@ def find_pullbundle(repo, proto, opts, c
     one specific branch of many.
     """
     def decodehexstring(s):
-        return set([binascii.unhexlify(h) for h in s.split(';')])
+        return {binascii.unhexlify(h) for h in s.split(';')}

     manifest = repo.vfs.tryread('pullbundles.manifest')
     if not manifest:
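Beyond readability, the rewritten spellings also avoid building an intermediate list at the bytecode level. One way to see this for yourself (the exact output varies by CPython version, and this snippet is illustrative rather than part of the patch) is to disassemble both forms:

    import dis

    # The old form builds a list and then passes it to set(); the new form
    # compiles to a dedicated set-comprehension code path.
    dis.dis(compile("set([x for x in data])", "<old>", "eval"))
    dis.dis(compile("{x for x in data}", "<new>", "eval"))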