@@ -81,7 +81,7 @@ def runperfcommand(reponame, command, *a
     output = ui.popbuffer()
     match = outputre.search(output)
     if not match:
-        raise ValueError("Invalid output {0}".format(output))
+        raise ValueError("Invalid output {}".format(output))
     return float(match.group(1))


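Aside, for illustration only (value made up): the hunk above can swap "{0}" for "{}" because str.format auto-numbers empty placeholder fields, so with a single argument both spellings render identically:

    output = "perf: 1.23"
    assert ("Invalid output {0}".format(output)
            == "Invalid output {}".format(output))
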
@@ -32,7 +32,7 @@ def check_compat_py2(f):
     for node in ast.walk(root):
         if isinstance(node, ast.ImportFrom):
             if node.module == '__future__':
-                futures |= set(n.name for n in node.names)
+                futures |= {n.name for n in node.names}
         elif isinstance(node, ast.Print):
             haveprint = True

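Aside, a minimal sketch of the pattern this whole series applies (toy data, not Mercurial code): passing a generator to set() and writing a set comprehension build the same set, but the literal form skips a global name lookup and a function call:

    names = ["tip", "default", "stable", "default"]
    old_style = set(n.upper() for n in names)   # generator fed to set()
    new_style = {n.upper() for n in names}      # set comprehension
    assert old_style == new_style == {"TIP", "DEFAULT", "STABLE"}
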
@@ -2523,7 +2523,7 @@ def perfdiffwd(ui, repo, **opts):
     }

     for diffopt in ('', 'w', 'b', 'B', 'wB'):
-        opts = dict((options[c], b'1') for c in diffopt)
+        opts = {options[c]: b'1' for c in diffopt}

         def d():
             ui.pushbuffer()
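Aside, the dict-side counterpart of the same rewrite, with hypothetical option values shaped like the hunk above: dict() over (key, value) pairs and a dict comprehension are equivalent, but the comprehension avoids building a tuple per entry:

    options = {'w': b'ignorews', 'b': b'ignorewsamount'}
    old_style = dict((options[c], b'1') for c in 'wb')
    new_style = {options[c]: b'1' for c in 'wb'}
    assert old_style == new_style == {b'ignorews': b'1', b'ignorewsamount': b'1'}
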
@@ -3048,7 +3048,7 @@ def perfrevlogchunks(ui, repo, file_=Non

     # Verify engines argument.
     if engines:
-        engines = set(e.strip() for e in engines.split(b','))
+        engines = {e.strip() for e in engines.split(b',')}
         for engine in engines:
             try:
                 util.compressionengines[engine]
@@ -407,7 +407,7 @@ class filefixupstate(object):
         involved = [
             annotated[i] for i in nearbylinenums if annotated[i][0] != 1
         ]
-        involvedrevs = list(set(r for r, l in involved))
+        involvedrevs = list({r for r, l in involved})
         newfixups = []
         if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
            # chunk belongs to a single revision
@@ -734,10 +734,10 @@ class fixupstate(object):
     @property
     def chunkstats(self):
         """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
-        return dict(
-            (path, state.chunkstats)
-            for path, state in pycompat.iteritems(self.fixupmap)
-        )
+        return {
+            path: state.chunkstats
+            for path, state in pycompat.iteritems(self.fixupmap)
+        }

     def commit(self):
         """commit changes. update self.finalnode, self.replacemap"""
@@ -76,7 +76,7 @@ def close_branch(ui, repo, *revs, **opts
     heads = []
     for branch in repo.branchmap():
         heads.extend(repo.branchheads(branch))
-    heads = set(repo[h].rev() for h in heads)
+    heads = {repo[h].rev() for h in heads}
    for rev in revs:
        if rev not in heads:
            raise error.Abort(_(b'revision is not an open head: %d') % rev)
@@ -677,13 +677,9 @@ class mercurial_source(common.converter_
             for t in self.repo.tagslist()
             if self.repo.tagtype(t[0]) == b'global'
         ]
-        return dict(
-            [
-                (name, nodemod.hex(node))
-                for name, node in tags
-                if self.keep(node)
-            ]
-        )
+        return {
+            name: nodemod.hex(node) for name, node in tags if self.keep(node)
+        }

     def getchangedfiles(self, rev, i):
         ctx = self._changectx(rev)
@@ -710,11 +710,11 @@ class svn_source(converter_source):
             # Here/tags/tag.1 discarded as well as its children.
             # It happens with tools like cvs2svn. Such tags cannot
             # be represented in mercurial.
-            addeds = dict(
-                (p, e.copyfrom_path)
-                for p, e in pycompat.iteritems(origpaths)
-                if e.action == b'A' and e.copyfrom_path
-            )
+            addeds = {
+                p: e.copyfrom_path
+                for p, e in pycompat.iteritems(origpaths)
+                if e.action == b'A' and e.copyfrom_path
+            }
             badroots = set()
             for destroot in addeds:
                 for source, sourcerev, dest in pendings:
@@ -221,7 +221,7 @@ class eolfile(object):
         self.match = match.match(root, b'', [], include, exclude)

     def copytoui(self, ui):
-        newpatterns = set(pattern for pattern, key, m in self.patterns)
+        newpatterns = {pattern for pattern, key, m in self.patterns}
         for section in (b'decode', b'encode'):
             for oldpattern, _filter in ui.configitems(section):
                 if oldpattern not in newpatterns:
@@ -233,7 +233,7 @@ def fastannotate(ui, repo, *pats, **opts
             showlines=(showlines and not showdeleted),
         )
         if showdeleted:
-            existinglines = set((l[0], l[1]) for l in result)
+            existinglines = {(l[0], l[1]) for l in result}
             result = a.annotatealllines(
                 rev, showpath=showpath, showlines=showlines
             )
@@ -397,7 +397,7 @@ def overridewalk(orig, self, match, subr
         # for file paths which require normalization and we encounter a case
         # collision, we store our own foldmap
         if normalize:
-            foldmap = dict((normcase(k), k) for k in results)
+            foldmap = {normcase(k): k for k in results}

         switch_slashes = pycompat.ossep == b'\\'
         # The order of the results is, strictly speaking, undefined.
@@ -459,22 +459,16 @@ def overridewalk(orig, self, match, subr
         if normalize:
             # any notable files that have changed case will already be handled
             # above, so just check membership in the foldmap
-            notefiles = set(
-                (
-                    normalize(f, True, True)
-                    for f in notefiles
-                    if normcase(f) not in foldmap
-                )
-            )
-        visit = set(
-            (
-                f
-                for f in notefiles
-                if (
-                    f not in results and matchfn(f) and (f in dmap or not ignore(f))
-                )
-            )
-        )
+            notefiles = {
+                normalize(f, True, True)
+                for f in notefiles
+                if normcase(f) not in foldmap
+            }
+        visit = {
+            f
+            for f in notefiles
+            if (f not in results and matchfn(f) and (f in dmap or not ignore(f)))
+        }

         if not fresh_instance:
             if matchalways:
@@ -835,10 +835,10 @@ class fold(histeditaction):
             return ctx, [(self.node, (parentctxnode,))]

         parentctx = repo[parentctxnode]
-        newcommits = set(
-            c.node()
-            for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
-        )
+        newcommits = {
+            c.node()
+            for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
+        }
         if not newcommits:
             repo.ui.warn(
                 _(
@@ -2412,7 +2412,7 @@ def verifyactions(actions, state, ctxs):
     Will abort if there are to many or too few rules, a malformed rule,
     or a rule on a changeset outside of the user-given range.
     """
-    expected = set(c.node() for c in ctxs)
+    expected = {c.node() for c in ctxs}
     seen = set()
     prev = None

@@ -67,7 +67,7 @@ class basestore(object):
         ui = self.ui

         at = 0
-        available = self.exists(set(hash for (_filename, hash) in files))
+        available = self.exists({hash for (_filename, hash) in files})
         with ui.makeprogress(
             _(b'getting largefiles'), unit=_(b'files'), total=len(files)
         ) as progress:
@@ -1564,11 +1564,11 @@ def overridepurge(orig, ui, repo, *dirs,
 def overriderollback(orig, ui, repo, **opts):
     with repo.wlock():
         before = repo.dirstate.parents()
-        orphans = set(
-            f
-            for f in repo.dirstate
-            if lfutil.isstandin(f) and repo.dirstate[f] != b'r'
-        )
+        orphans = {
+            f
+            for f in repo.dirstate
+            if lfutil.isstandin(f) and repo.dirstate[f] != b'r'
+        }
         result = orig(ui, repo, **opts)
         after = repo.dirstate.parents()
         if before == after:
@@ -48,12 +48,12 @@ class remotestore(basestore.basestore):
         )

     def exists(self, hashes):
-        return dict(
-            (h, s == 0)
-            for (h, s) in pycompat.iteritems(
-                self._stat(hashes)
-            )  # dict-from-generator
-        )
+        return {
+            h: s == 0
+            for (h, s) in pycompat.iteritems(
+                self._stat(hashes)
+            )  # dict-from-generator
+        }

     def sendfile(self, filename, hash):
         self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
@@ -1162,7 +1162,7 @@ class queue(object):

         if unknown:
             if numrevs:
-                rev = dict((entry.name, entry.node) for entry in qfinished)
+                rev = {entry.name: entry.node for entry in qfinished}
                 for p in unknown:
                     msg = _(b'revision %s refers to unknown patches: %s\n')
                     self.ui.warn(msg % (short(rev[p]), p))
@@ -3361,7 +3361,7 @@ def guard(ui, repo, *args, **opts):
             ui.write(b'\n')

     q = repo.mq
-    applied = set(p.name for p in q.applied)
+    applied = {p.name for p in q.applied}
     patch = None
     args = list(args)
     if opts.get('list'):
@@ -483,7 +483,7 @@ def getoldnodedrevmap(repo, nodelist):
         ]

         # "precursors" as known by Phabricator
-        phprecset = set(getnode(d) for d in diffs)
+        phprecset = {getnode(d) for d in diffs}

         # Ignore if precursors (Phabricator and local repo) do not overlap,
         # and force is not set (when commit message says nothing)
@@ -1062,7 +1062,7 @@ def userphids(ui, names):
     # username not found is not an error of the API. So check if we have missed
     # some names here.
     data = result[b'data']
-    resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
+    resolved = {entry[b'fields'][b'username'].lower() for entry in data}
     unresolved = set(names) - resolved
     if unresolved:
         raise error.Abort(
@@ -1635,7 +1635,7 @@ def readpatch(ui, drevs, write):
     "differential.query".
     """
     # Prefetch hg:meta property for all diffs
-    diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs))
+    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
     diffs = callconduit(ui, b'differential.querydiffs', {b'ids': diffids})

     patches = []
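Aside (made-up review data, illustration only): the inner set comprehension deduplicates the per-revision maxima before sorted() turns them into a stable list:

    drevs = [{b'diffs': [b'3', b'10']}, {b'diffs': [b'10']}]
    diffids = sorted({max(int(v) for v in drev[b'diffs']) for drev in drevs})
    assert diffids == [10]  # both entries resolve to diff 10; the set collapses them
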
@@ -1792,11 +1792,11 @@ def phabstatusshowview(ui, repo, display
     """Phabricator differiential status"""
     revs = repo.revs('sort(_underway(), topo)')
     drevmap = getdrevmap(repo, revs)
-    unknownrevs, drevids, revsbydrevid = [], set([]), {}
+    unknownrevs, drevids, revsbydrevid = [], set(), {}
     for rev, drevid in pycompat.iteritems(drevmap):
         if drevid is not None:
             drevids.add(drevid)
-            revsbydrevid.setdefault(drevid, set([])).add(rev)
+            revsbydrevid.setdefault(drevid, set()).add(rev)
         else:
             unknownrevs.append(rev)

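Aside: set([]) can be shortened to set() but not to {}, because an empty pair of braces is an empty dict; the empty set has no literal form. A quick check:

    empty = {}
    assert isinstance(empty, dict)         # {} is a dict, not a set
    assert set() == set([]) and not set()  # set() is the only empty-set spelling
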
@@ -1936,7 +1936,7 @@ def buildstate(repo, destmap, collapse):
     # applied patch. But it prevents messing up the working directory when
     # a partially completed rebase is blocked by mq.
     if b'qtip' in repo.tags():
-        mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
+        mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
         if set(destmap.values()) & mqapplied:
             raise error.Abort(_(b'cannot rebase onto an applied mq patch'))

@@ -2121,7 +2121,7 @@ def pullrebase(orig, ui, repo, *args, **

 def _filterobsoleterevs(repo, revs):
     """returns a set of the obsolete revisions in revs"""
-    return set(r for r in revs if repo[r].obsolete())
+    return {r for r in revs if repo[r].obsolete()}


 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
@@ -737,7 +737,7 @@ def onetimeclientsetup(ui):
         # "link" is actually wrong here (it is set to len(changelog))
         # if changelog remains unchanged, skip writing file revisions
         # but still do a sanity check about pending multiple revisions
-        if len(set(x[3] for x in pendingfilecommits)) > 1:
+        if len({x[3] for x in pendingfilecommits}) > 1:
             raise error.ProgrammingError(
                 b'pending multiple integer revisions are not supported'
             )
@@ -101,7 +101,7 @@ class _cachebackedpacks(object):
             self._lastpack = pack
             yield pack

-        cachedpacks = set(pack for pack in self._lrucache)
+        cachedpacks = {pack for pack in self._lrucache}
         # Yield for paths not in the cache.
         for pack in self._packs - cachedpacks:
             self._lastpack = pack
@@ -259,7 +259,7 @@ class basepackstore(object):
         newpacks = []
         if now > self.lastrefresh + REFRESHRATE:
             self.lastrefresh = now
-            previous = set(p.path for p in self.packs)
+            previous = {p.path for p in self.packs}
             for filepath, __, __ in self._getavailablepackfilessorted():
                 if filepath not in previous:
                     newpack = self.getpack(filepath)
@@ -300,7 +300,7 @@ class manifestrevlogstore(object):

         rl = self._revlog(name)
         ancestors = {}
-        missing = set((node,))
+        missing = {node}
         for ancrev in rl.ancestors([rl.rev(node)], inclusive=True):
             ancnode = rl.node(ancrev)
             missing.discard(ancnode)
@@ -271,9 +271,9 @@ class datapack(basepack.basepack):
     def cleanup(self, ledger):
         entries = ledger.sources.get(self, [])
         allkeys = set(self)
-        repackedkeys = set(
-            (e.filename, e.node) for e in entries if e.datarepacked or e.gced
-        )
+        repackedkeys = {
+            (e.filename, e.node) for e in entries if e.datarepacked or e.gced
+        }

         if len(allkeys - repackedkeys) == 0:
             if self.path not in ledger.created:
@@ -132,7 +132,7 @@ class historypack(basepack.basepack):
         known = set()
         section = self._findsection(name)
         filename, offset, size, nodeindexoffset, nodeindexsize = section
-        pending = set((node,))
+        pending = {node}
         o = 0
         while o < size:
             if not pending:
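Aside: a one-element set does have a literal, so set((node,)) and set([node]) both collapse to {node}. With a placeholder node id (illustration only):

    node = b'\x00' * 20                    # hypothetical 20-byte node id
    assert {node} == set((node,)) == set([node])
    assert len({node}) == 1
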
@@ -291,9 +291,9 @@ class historypack(basepack.basepack):
     def cleanup(self, ledger):
         entries = ledger.sources.get(self, [])
         allkeys = set(self)
-        repackedkeys = set(
-            (e.filename, e.node) for e in entries if e.historyrepacked
-        )
+        repackedkeys = {
+            (e.filename, e.node) for e in entries if e.historyrepacked
+        }

         if len(allkeys - repackedkeys) == 0:
             if self.path not in ledger.created:
@@ -452,7 +452,7 @@ class mutablehistorypack(basepack.mutabl
             sectionstart = self.packfp.tell()

             # Write the file section content
-            entrymap = dict((e[0], e) for e in entries)
+            entrymap = {e[0]: e for e in entries}

             def parentfunc(node):
                 x, p1, p2, x, x, x = entrymap[node]
@@ -429,7 +429,7 @@ class remotefilelog(object):
             return nullid

         revmap, parentfunc = self._buildrevgraph(a, b)
-        nodemap = dict((v, k) for (k, v) in pycompat.iteritems(revmap))
+        nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}

         ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
         if ancs:
@@ -444,7 +444,7 @@ class remotefilelog(object):
             return nullid

         revmap, parentfunc = self._buildrevgraph(a, b)
-        nodemap = dict((v, k) for (k, v) in pycompat.iteritems(revmap))
+        nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}

         ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
         return map(nodemap.__getitem__, ancs)
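Aside (toy revmap, not real node data): the comprehension in both hunks above is the standard idiom for inverting a one-to-one mapping:

    revmap = {b'n1': 0, b'n2': 1}
    nodemap = {v: k for (k, v) in revmap.items()}
    assert nodemap == {0: b'n1', 1: b'n2'}
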
@@ -321,7 +321,7 @@ def _computeincrementalhistorypack(ui, f

 def _allpackfileswithsuffix(files, packsuffix, indexsuffix):
     result = []
-    fileset = set(fn for fn, mode, stat in files)
+    fileset = {fn for fn, mode, stat in files}
     for filename, mode, stat in files:
         if not filename.endswith(packsuffix):
             continue
@@ -97,7 +97,7 @@ class ShortRepository(object):
             parts = parts[:-1]
         else:
             tail = b''
-        context = dict((b'%d' % (i + 1), v) for i, v in enumerate(parts))
+        context = {b'%d' % (i + 1): v for i, v in enumerate(parts)}
         return b''.join(self.templater.process(self.url, context)) + tail


@@ -246,7 +246,7 @@ def _setupdirstate(ui):
         if changedfiles is not None:
             # In _rebuild, these files will be deleted from the dirstate
             # when they are not found to be in allfiles
-            dirstatefilestoremove = set(f for f in self if not matcher(f))
+            dirstatefilestoremove = {f for f in self if not matcher(f)}
             changedfiles = dirstatefilestoremove.union(changedfiles)

         return orig(self, parent, allfiles, changedfiles)
@@ -228,7 +228,7 @@ def stripcmd(ui, repo, *revs, **opts):
         for p in repo.dirstate.parents()
     )

-    rootnodes = set(cl.node(r) for r in roots)
+    rootnodes = {cl.node(r) for r in roots}

     q = getattr(repo, 'mq', None)
     if q is not None and q.applied:
@@ -840,10 +840,10 @@ def _dotransplant(ui, repo, *revs, **opt

     tf = tp.transplantfilter(repo, source, p1)
     if opts.get(b'prune'):
-        prune = set(
-            source[r].node()
-            for r in scmutil.revrange(source, opts.get(b'prune'))
-        )
+        prune = {
+            source[r].node()
+            for r in scmutil.revrange(source, opts.get(b'prune'))
+        }
         matchfn = lambda x: tf(x) and x not in prune
     else:
         matchfn = tf
@@ -65,7 +65,7 @@ def _commitfiltered(
     base = ctx.p1()
     # ctx
     initialfiles = set(ctx.files())
-    exclude = set(f for f in initialfiles if match(f))
+    exclude = {f for f in initialfiles if match(f)}

     # No files matched commit, so nothing excluded
     if not exclude:
@@ -78,9 +78,9 @@ def _commitfiltered(
     files = initialfiles - exclude
     # Filter copies
     copied = copiesmod.pathcopies(base, ctx)
-    copied = dict(
-        (dst, src) for dst, src in pycompat.iteritems(copied) if dst in files
-    )
+    copied = {
+        dst: src for dst, src in pycompat.iteritems(copied) if dst in files
+    }

     def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
         if path not in contentctx:
@@ -722,8 +722,8 @@ class POFile(_BaseFile):
         object POFile, the reference catalog.
         """
         # Store entries in dict/set for faster access
-        self_entries = dict((entry.msgid, entry) for entry in self)
-        refpot_msgids = set(entry.msgid for entry in refpot)
+        self_entries = {entry.msgid: entry for entry in self}
+        refpot_msgids = {entry.msgid for entry in refpot}
         # Merge entries that are in the refpot
         for entry in refpot:
             e = self_entries.get(entry.msgid)
@@ -1808,9 +1808,9 @@ class _MOFileParser(object):
                 entry = self._build_entry(
                     msgid=msgid_tokens[0],
                     msgid_plural=msgid_tokens[1],
-                    msgstr_plural=dict(
-                        (k, v) for k, v in enumerate(msgstr.split(b('\0')))
-                    ),
+                    msgstr_plural={
+                        k: v for k, v in enumerate(msgstr.split(b('\0')))
+                    },
                 )
             else:
                 entry = self._build_entry(msgid=msgid, msgstr=msgstr)
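Aside: since enumerate() already yields (index, value) pairs, the comprehension above is equivalent to dict(enumerate(...)); a self-contained check with a made-up plural string:

    msgstr = b'one\x00two'
    plural = {k: v for k, v in enumerate(msgstr.split(b'\x00'))}
    assert plural == dict(enumerate([b'one', b'two'])) == {0: b'one', 1: b'two'}
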
@@ -138,7 +138,7 @@ def ancestors(pfunc, *orignodes):
         k = 0
         for i in interesting:
             k |= i
-        return set(n for (i, n) in mapping if k & i)
+        return {n for (i, n) in mapping if k & i}

     gca = commonancestorsheads(pfunc, *orignodes)

@@ -446,7 +446,7 @@ class branchcache(object):
         # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
         for branch, newheadrevs in pycompat.iteritems(newbranches):
             bheads = self._entries.setdefault(branch, [])
-            bheadset = set(cl.rev(node) for node in bheads)
+            bheadset = {cl.rev(node) for node in bheads}

             # This have been tested True on all internal usage of this function.
             # run it again in case of doubt
582 |
|
582 | |||
583 | @util.propertycache |
|
583 | @util.propertycache | |
584 | def _namesreverse(self): |
|
584 | def _namesreverse(self): | |
585 |
return |
|
585 | return {b: r for r, b in enumerate(self._names)} | |
586 |
|
586 | |||
587 | def branchinfo(self, rev): |
|
587 | def branchinfo(self, rev): | |
588 | """Return branch name and close flag for rev, using and updating |
|
588 | """Return branch name and close flag for rev, using and updating |
@@ -993,7 +993,7 @@ class cgpacker(object):
         ]

         manifests.clear()
-        clrevs = set(cl.rev(x) for x in clnodes)
+        clrevs = {cl.rev(x) for x in clnodes}

         it = self.generatefiles(
             changedfiles,
1282 | flinkrev = store.linkrev |
|
1282 | flinkrev = store.linkrev | |
1283 | fnode = store.node |
|
1283 | fnode = store.node | |
1284 | revs = ((r, flinkrev(r)) for r in store) |
|
1284 | revs = ((r, flinkrev(r)) for r in store) | |
1285 | return dict( |
|
1285 | return {fnode(r): cln(lr) for r, lr in revs if lr in clrevs} | |
1286 | (fnode(r), cln(lr)) for r, lr in revs if lr in clrevs |
|
|||
1287 | ) |
|
|||
1288 |
|
1286 | |||
1289 | clrevtolocalrev = {} |
|
1287 | clrevtolocalrev = {} | |
1290 |
|
1288 |
@@ -3125,7 +3125,7 @@ def amend(ui, repo, old, extra, pats, op
         ms = mergemod.mergestate.read(repo)
         mergeutil.checkunresolved(ms)

-        filestoamend = set(f for f in wctx.files() if matcher(f))
+        filestoamend = {f for f in wctx.files() if matcher(f)}

         changes = len(filestoamend) > 0
         if changes:
@@ -3917,7 +3917,7 @@ def _performrevert(
     # Apply changes
     fp = stringio()
     # chunks are serialized per file, but files aren't sorted
-    for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
+    for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
         prntstatusmsg(b'revert', f)
         files = set()
         for c in chunks:
@@ -3721,9 +3721,9 @@ def heads(ui, repo, *branchrevs, **opts)
     heads = [repo[h] for h in heads]

     if branchrevs:
-        branches = set(
-            repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
-        )
+        branches = {
+            repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
+        }
         heads = [h for h in heads if h.branch() in branches]

     if opts.get(b'active') and branchrevs:
@@ -3731,7 +3731,7 @@ def heads(ui, repo, *branchrevs, **opts)
         heads = [h for h in heads if h.node() in dagheads]

     if branchrevs:
-        haveheads = set(h.branch() for h in heads)
+        haveheads = {h.branch() for h in heads}
         if branches - haveheads:
             headless = b', '.join(b for b in branches - haveheads)
             msg = _(b'no open branch heads found on branches %s')
@@ -584,7 +584,7 @@ def debugdag(ui, repo, file_=None, *revs
     dots = opts.get('dots')
     if file_:
         rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
-        revs = set(int(r) for r in revs)
+        revs = {int(r) for r in revs}

         def events():
             for r in rlog:
@@ -1134,7 +1134,7 @@ def debugfileset(ui, repo, expr, **opts)
         (b'analyzed', filesetlang.analyze),
         (b'optimized', filesetlang.optimize),
     ]
-    stagenames = set(n for n, f in stages)
+    stagenames = {n for n, f in stages}

     showalways = set()
     if ui.verbose and not opts[b'show_stage']:
@@ -2598,7 +2598,7 @@ def debugrebuilddirstate(ui, repo, rev, 
         dirstatefiles = set(dirstate)
         manifestonly = manifestfiles - dirstatefiles
         dsonly = dirstatefiles - manifestfiles
-        dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
+        dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
         changedfiles = manifestonly | dsnotadded

         dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
@@ -3165,7 +3165,7 @@ def debugrevspec(ui, repo, expr, **opts)
         raise error.Abort(
             _(b'cannot use --verify-optimized with --no-optimized')
         )
-    stagenames = set(n for n, f in stages)
+    stagenames = {n for n, f in stages}

     showalways = set()
     showchanged = set()
@@ -188,7 +188,7 @@ def findcommonoutgoing(
         # ancestors of missing
         og._computecommonmissing()
         cl = repo.changelog
-        missingrevs = set(cl.rev(n) for n in og._missing)
+        missingrevs = {cl.rev(n) for n in og._missing}
         og._common = set(cl.ancestors(missingrevs)) - missingrevs
         commonheads = set(og.commonheads)
         og.missingheads = [h for h in og.missingheads if h not in commonheads]
@@ -264,8 +264,8 @@ def _headssummary(pushop):
     # If there are no obsstore, no post processing are needed.
     if repo.obsstore:
         torev = repo.changelog.rev
-        futureheads = set(torev(h) for h in outgoing.missingheads)
-        futureheads |= set(torev(h) for h in outgoing.commonheads)
+        futureheads = {torev(h) for h in outgoing.missingheads}
+        futureheads |= {torev(h) for h in outgoing.commonheads}
         allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
         for branch, heads in sorted(pycompat.iteritems(headssum)):
             remoteheads, newheads, unsyncedheads, placeholder = heads
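Aside (toy head lists, illustration only): set comprehensions compose with the in-place union operator exactly as the set(...) calls did:

    missingheads, commonheads = [b'a', b'b'], [b'b', b'c']
    futureheads = {h.upper() for h in missingheads}
    futureheads |= {h.upper() for h in commonheads}
    assert futureheads == {b'A', b'B', b'C'}
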
@@ -514,7 +514,7 @@ def aliasinterpolate(name, args, cmd):
     '''
     # util.interpolate can't deal with "$@" (with quotes) because it's only
     # built to match prefix + patterns.
-    replacemap = dict((b'$%d' % (i + 1), arg) for i, arg in enumerate(args))
+    replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
     replacemap[b'$0'] = name
     replacemap[b'$$'] = b'$'
     replacemap[b'$@'] = b' '.join(args)
@@ -86,10 +86,10 @@ elif _nativeenviron:
 else:
     # preferred encoding isn't known yet; use utf-8 to avoid unicode error
     # and recreate it once encoding is settled
-    environ = dict(
-        (k.encode('utf-8'), v.encode('utf-8'))
-        for k, v in os.environ.items()  # re-exports
-    )
+    environ = {
+        k.encode('utf-8'): v.encode('utf-8')
+        for k, v in os.environ.items()  # re-exports
+    }

 _encodingrewrites = {
     b'646': b'ascii',
@@ -285,10 +285,10 @@ else:
 if not _nativeenviron:
     # now encoding and helper functions are available, recreate the environ
     # dict to be exported to other modules
-    environ = dict(
-        (tolocal(k.encode('utf-8')), tolocal(v.encode('utf-8')))
-        for k, v in os.environ.items()  # re-exports
-    )
+    environ = {
+        tolocal(k.encode('utf-8')): tolocal(v.encode('utf-8'))
+        for k, v in os.environ.items()  # re-exports
+    }

 if pycompat.ispy3:
     # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
@@ -1679,12 +1679,12 @@ def _fullpullbundle2(repo, pullop):
     def headsofdiff(h1, h2):
         """Returns heads(h1 % h2)"""
         res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
-        return set(ctx.node() for ctx in res)
+        return {ctx.node() for ctx in res}

     def headsofunion(h1, h2):
         """Returns heads((h1 + h2) - null)"""
         res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
-        return set(ctx.node() for ctx in res)
+        return {ctx.node() for ctx in res}

     while True:
         old_heads = unficl.heads()
@@ -787,11 +787,11 @@ def disabled():
     try:
         from hgext import __index__  # pytype: disable=import-error

-        return dict(
-            (name, gettext(desc))
-            for name, desc in pycompat.iteritems(__index__.docs)
-            if name not in _order
-        )
+        return {
+            name: gettext(desc)
+            for name, desc in pycompat.iteritems(__index__.docs)
+            if name not in _order
+        }
     except (ImportError, AttributeError):
         pass

@@ -314,7 +314,7 @@ def fancyopts(args, options, state, gnu=
     argmap = {}
     defmap = {}
     negations = {}
-    alllong = set(o[1] for o in options)
+    alllong = {o[1] for o in options}

     for option in options:
         if len(option) == 5:
@@ -58,7 +58,7 @@ def dagwalker(repo, revs):
         # partition into parents in the rev set and missing parents, then
         # augment the lists with markers, to inform graph drawing code about
         # what kind of edge to draw between nodes.
-        pset = set(p.rev() for p in ctx.parents() if p.rev() in revs)
+        pset = {p.rev() for p in ctx.parents() if p.rev() in revs}
         mpars = [
             p.rev()
             for p in ctx.parents()
@@ -95,9 +95,9 @@ def nodes(repo, nodes):
     include = set(nodes)
     for node in nodes:
         ctx = repo[node]
-        parents = set(
-            (PARENT, p.rev()) for p in ctx.parents() if p.node() in include
-        )
+        parents = {
+            (PARENT, p.rev()) for p in ctx.parents() if p.node() in include
+        }
         yield (ctx.rev(), CHANGESET, ctx, sorted(parents))


@@ -137,7 +137,7 @@ def extendrange(repo, state, nodes, good
         side = state[b'bad']
     else:
         side = state[b'good']
-    num = len(set(i.node() for i in parents) & set(side))
+    num = len({i.node() for i in parents} & set(side))
     if num == 1:
         return parents[0].ancestor(parents[1])
     return None
@@ -1809,7 +1809,7 @@ class localrepository(object):
         # map tag name to (node, hist)
         alltags = tagsmod.findglobaltags(self.ui, self)
         # map tag name to tag type
-        tagtypes = dict((tag, b'global') for tag in alltags)
+        tagtypes = {tag: b'global' for tag in alltags}

         tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

@@ -1822,12 +1822,10 @@ class localrepository(object):
             if node != nullid:
                 tags[encoding.tolocal(name)] = node
         tags[b'tip'] = self.changelog.tip()
-        tagtypes = dict(
-            [
-                (encoding.tolocal(name), value)
-                for (name, value) in pycompat.iteritems(tagtypes)
-            ]
-        )
+        tagtypes = {
+            encoding.tolocal(name): value
+            for (name, value) in pycompat.iteritems(tagtypes)
+        }
         return (tags, tagtypes)

     def tagtype(self, tagname):
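Aside: the removed dict([...]) form first materializes a full list of pairs and then copies it into a dict; the comprehension builds the mapping directly. Equivalence on hypothetical tag data:

    tagtypes = {b'tip': b'global', b'release': b'local'}
    old_form = dict(
        [(name.decode('ascii'), value) for name, value in tagtypes.items()]
    )
    new_form = {name.decode('ascii'): value for name, value in tagtypes.items()}
    assert old_form == new_form == {'tip': b'global', 'release': b'local'}
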
@@ -772,7 +772,7 @@ class exactmatcher(basematcher):
         candidates = self._fileset | self._dirs - {b''}
         if dir != b'':
             d = dir + b'/'
-            candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
+            candidates = {c[len(d) :] for c in candidates if c.startswith(d)}
         # self._dirs includes all of the directories, recursively, so if
         # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
         # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
@@ -91,7 +91,7 @@ class diffopts(object):
         )

     def copy(self, **kwargs):
-        opts = dict((k, getattr(self, k)) for k in self.defaults)
+        opts = {k: getattr(self, k) for k in self.defaults}
         opts = pycompat.strkwargs(opts)
         opts.update(kwargs)
         return diffopts(**opts)
@@ -1796,8 +1796,8 @@ class updateresult(object):

 def emptyactions():
     """create an actions dict, to be populated and passed to applyupdates()"""
-    return dict(
-        (m, [])
+    return {
+        m: []
         for m in (
             ACTION_ADD,
             ACTION_ADD_MODIFIED,
@@ -1814,7 +1814,7 @@ def emptyactions():
             ACTION_PATH_CONFLICT,
             ACTION_PATH_CONFLICT_RESOLVE,
         )
-    )
+    }


 def applyupdates(
@@ -2070,7 +2070,7 @@ def applyupdates(

     extraactions = ms.actions()
     if extraactions:
-        mfiles = set(a[0] for a in actions[ACTION_MERGE])
+        mfiles = {a[0] for a in actions[ACTION_MERGE]}
         for k, acts in pycompat.iteritems(extraactions):
             actions[k].extend(acts)
             if k == ACTION_GET and wantfiledata:
@@ -939,7 +939,7 @@ def _computeobsoleteset(repo):
     getnode = repo.changelog.node
     notpublic = _mutablerevs(repo)
     isobs = repo.obsstore.successors.__contains__
-    obs = set(r for r in notpublic if isobs(getnode(r)))
+    obs = {r for r in notpublic if isobs(getnode(r))}
     return obs


@@ -965,7 +965,7 @@ def _computeorphanset(repo):
 def _computesuspendedset(repo):
     """the set of obsolete parents with non obsolete descendants"""
     suspended = repo.changelog.ancestors(getrevs(repo, b'orphan'))
-    return set(r for r in getrevs(repo, b'obsolete') if r in suspended)
+    return {r for r in getrevs(repo, b'obsolete') if r in suspended}


 @cachefor(b'extinct')
@@ -194,7 +194,7 @@ def allsuccessors(obsstore, nodes, ignor

 def _filterprunes(markers):
     """return a set with no prune markers"""
-    return set(m for m in markers if m[1])
+    return {m for m in markers if m[1]}


 def exclusivemarkers(repo, nodes):
@@ -338,12 +338,12 @@ def foreground(repo, nodes):
     # compute the whole set of successors or descendants
     while len(foreground) != plen:
         plen = len(foreground)
-        succs = set(c.node() for c in foreground)
+        succs = {c.node() for c in foreground}
         mutable = [c.node() for c in foreground if c.mutable()]
         succs.update(allsuccessors(repo.obsstore, mutable))
         known = (n for n in succs if has_node(n))
         foreground = set(repo.set(b'%ln::', known))
-    return set(c.node() for c in foreground)
+    return {c.node() for c in foreground}


 # effectflag field
@@ -855,11 +855,11 @@ def markersusers(markers):
     """ Returns a sorted list of markers users without duplicates
     """
     markersmeta = [dict(m[3]) for m in markers]
-    users = set(
-        encoding.tolocal(meta[b'user'])
-        for meta in markersmeta
-        if meta.get(b'user')
-    )
+    users = {
+        encoding.tolocal(meta[b'user'])
+        for meta in markersmeta
+        if meta.get(b'user')
+    }

     return sorted(users)

@@ -868,9 +868,9 @@ def markersoperations(markers):
     """ Returns a sorted list of markers operations without duplicates
     """
     markersmeta = [dict(m[3]) for m in markers]
-    operations = set(
-        meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
-    )
+    operations = {
+        meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
+    }

     return sorted(operations)

@@ -2888,7 +2888,7 @@ def _filepairs(modified, added, removed,
     or 'rename' (the latter two only if opts.git is set).'''
     gone = set()

-    copyto = dict([(v, k) for k, v in copy.items()])
+    copyto = {v: k for k, v in copy.items()}

     addedset, removedset = set(added), set(removed)

@@ -445,10 +445,10 @@ class phasecache(object):
                     phasetracking, r, self.phase(repo, r), targetphase
                 )

-            roots = set(
-                ctx.node()
-                for ctx in repo.set(b'roots((%ln::) - %ld)', olds, affected)
-            )
+            roots = {
+                ctx.node()
+                for ctx in repo.set(b'roots((%ln::) - %ld)', olds, affected)
+            }
             if olds != roots:
                 self._updateroots(phase, roots, tr)
                 # some roots may need to be declared for lower phases
@@ -518,9 +518,7 @@ class phasecache(object):
         ]
         updatedroots = repo.set(b'roots(%ln::)', aboveroots)

-        finalroots = set(
-            n for n in currentroots if repo[n].rev() < minnewroot
-        )
+        finalroots = {n for n in currentroots if repo[n].rev() < minnewroot}
         finalroots.update(ctx.node() for ctx in updatedroots)
         if finalroots != oldroots:
             self._updateroots(targetphase, finalroots, tr)
@@ -760,7 +758,7 @@ def newheads(repo, heads, roots):
     if not heads or heads == [nullid]:
         return []
     # The logic operated on revisions, convert arguments early for convenience
-    new_heads = set(rev(n) for n in heads if n != nullid)
+    new_heads = {rev(n) for n in heads if n != nullid}
     roots = [rev(n) for n in roots]
     # compute the area we need to remove
     affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads)
@@ -334,7 +334,7 @@ if ispy3:
         they can be passed as keyword arguments as dictonaries with bytes keys
         can't be passed as keyword arguments to functions on Python 3.
         """
-        dic = dict((k.decode('latin-1'), v) for k, v in dic.items())
+        dic = {k.decode('latin-1'): v for k, v in dic.items()}
         return dic

     def byteskwargs(dic):
@@ -342,7 +342,7 @@ if ispy3:
         Converts keys of python dictonaries to bytes as they were converted to
         str to pass that dictonary as a keyword argument on Python 3.
        """
-        dic = dict((k.encode('latin-1'), v) for k, v in dic.items())
+        dic = {k.encode('latin-1'): v for k, v in dic.items()}
         return dic

     # TODO: handle shlex.shlex().
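Aside, a minimal standalone sketch of the two helpers touched above (assuming the Python 3 branch; this is not the mercurial.pycompat module itself). latin-1 maps each of the 256 byte values to exactly one code point, so the conversions round-trip:

    def strkwargs(dic):
        return {k.decode('latin-1'): v for k, v in dic.items()}

    def byteskwargs(dic):
        return {k.encode('latin-1'): v for k, v in dic.items()}

    opts = {b'rev': b'tip', b'force': True}
    assert strkwargs(opts) == {'rev': b'tip', 'force': True}
    assert byteskwargs(strkwargs(opts)) == opts
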
@@ -351,7 +351,7 @@ def _createstripbackup(repo, stripbases,
 def safestriproots(ui, repo, nodes):
     """return list of roots of nodes where descendants are covered by nodes"""
     torev = repo.unfiltered().changelog.rev
-    revs = set(torev(n) for n in nodes)
+    revs = {torev(n) for n in nodes}
     # tostrip = wanted - unsafe = wanted - ancestors(orphaned)
     # orphaned = affected - wanted
     # affected = descendants(roots(wanted))
@@ -1286,7 +1286,7 @@ class revlog(object):
         else:
             start = self.rev(start)

-        stoprevs = set(self.rev(n) for n in stop or [])
+        stoprevs = {self.rev(n) for n in stop or []}

         revs = dagop.headrevssubset(
             self.revs, self.parentrevs, startrev=start, stoprevs=stoprevs
@@ -1875,7 +1875,7 b' def parents(repo, subset, x):' | |||||
1875 | The set of all parents for all changesets in set, or the working directory. |
|
1875 | The set of all parents for all changesets in set, or the working directory. | |
1876 | """ |
|
1876 | """ | |
1877 | if x is None: |
|
1877 | if x is None: | |
1878 | ps = set(p.rev() for p in repo[x].parents()) |
|
1878 | ps = {p.rev() for p in repo[x].parents()} | |
1879 | else: |
|
1879 | else: | |
1880 | ps = set() |
|
1880 | ps = set() | |
1881 | cl = repo.changelog |
|
1881 | cl = repo.changelog | |
@@ -2437,7 +2437,7 b' def _mapbynodefunc(repo, s, f):' | |||||
2437 | cl = repo.unfiltered().changelog |
|
2437 | cl = repo.unfiltered().changelog | |
2438 | torev = cl.index.get_rev |
|
2438 | torev = cl.index.get_rev | |
2439 | tonode = cl.node |
|
2439 | tonode = cl.node | |
2440 | result = set(torev(n) for n in f(tonode(r) for r in s)) |
|
2440 | result = {torev(n) for n in f(tonode(r) for r in s)} | |
2441 | result.discard(None) |
|
2441 | result.discard(None) | |
2442 | return smartset.baseset(result - repo.changelog.filteredrevs) |
|
2442 | return smartset.baseset(result - repo.changelog.filteredrevs) | |
2443 |
|
2443 |
@@ -1457,10 +1457,10 b' def movedirstate(repo, newctx, match=Non' | |||||
1457 | # Merge old parent and old working dir copies |
|
1457 | # Merge old parent and old working dir copies | |
1458 | oldcopies = copiesmod.pathcopies(newctx, oldctx, match) |
|
1458 | oldcopies = copiesmod.pathcopies(newctx, oldctx, match) | |
1459 | oldcopies.update(copies) |
|
1459 | oldcopies.update(copies) | |
1460 | copies = dict( |
|
1460 | copies = { | |
1461 | (dst, oldcopies.get(src, src)) |
|
1461 | dst: oldcopies.get(src, src) | |
1462 | for dst, src in pycompat.iteritems(oldcopies) |
|
1462 | for dst, src in pycompat.iteritems(oldcopies) | |
1463 | ) |
|
1463 | } | |
1464 | # Adjust the dirstate copies |
|
1464 | # Adjust the dirstate copies | |
1465 | for dst, src in pycompat.iteritems(copies): |
|
1465 | for dst, src in pycompat.iteritems(copies): | |
1466 | if src not in newctx or dst in newctx or ds[dst] != b'a': |
|
1466 | if src not in newctx or dst in newctx or ds[dst] != b'a': |
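
The `movedirstate` hunk applies the dict counterpart of the rule: `dict((k, v) for ...)` becomes `{k: v for ...}`. A small sketch with made-up copy records, and plain `dict.items()` standing in for `pycompat.iteritems`, showing the chained-rename lookup behaves identically in both spellings:

# Hypothetical copy records: destination -> source.
oldcopies = {'b.txt': 'a.txt', 'c.txt': 'b.txt'}

# Before: dict() consuming generated (key, value) tuples.
old_style = dict((dst, oldcopies.get(src, src)) for dst, src in oldcopies.items())

# After: the comprehension names key and value directly.
new_style = {dst: oldcopies.get(src, src) for dst, src in oldcopies.items()}

# 'c.txt' came from 'b.txt', which itself came from 'a.txt', so one hop collapses.
assert old_style == new_style == {'b.txt': 'a.txt', 'c.txt': 'a.txt'}
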
@@ -137,7 +137,7 b' def _buildencodefun():' | |||||
137 | asciistr = list(map(xchr, range(127))) |
|
137 | asciistr = list(map(xchr, range(127))) | |
138 | capitals = list(range(ord(b"A"), ord(b"Z") + 1)) |
|
138 | capitals = list(range(ord(b"A"), ord(b"Z") + 1)) | |
139 |
|
139 | |||
140 | cmap = dict((x, x) for x in asciistr) |
|
140 | cmap = {x: x for x in asciistr} | |
141 | for x in _reserved(): |
|
141 | for x in _reserved(): | |
142 | cmap[xchr(x)] = b"~%02x" % x |
|
142 | cmap[xchr(x)] = b"~%02x" % x | |
143 | for x in capitals + [ord(e)]: |
|
143 | for x in capitals + [ord(e)]: | |
@@ -200,7 +200,7 b' def _buildlowerencodefun():' | |||||
200 | 'the~07quick~adshot' |
|
200 | 'the~07quick~adshot' | |
201 | ''' |
|
201 | ''' | |
202 | xchr = pycompat.bytechr |
|
202 | xchr = pycompat.bytechr | |
203 | cmap = dict((xchr(x), xchr(x)) for x in pycompat.xrange(127)) |
|
203 | cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)} | |
204 | for x in _reserved(): |
|
204 | for x in _reserved(): | |
205 | cmap[xchr(x)] = b"~%02x" % x |
|
205 | cmap[xchr(x)] = b"~%02x" % x | |
206 | for x in range(ord(b"A"), ord(b"Z") + 1): |
|
206 | for x in range(ord(b"A"), ord(b"Z") + 1): |
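
Both `store.py` hunks seed an identity mapping with a comprehension and then overwrite the entries that need escaping, so the comprehension is only the starting table. A rough standalone sketch of that shape; the three escaped bytes here are invented for illustration and are not Mercurial's actual `_reserved()` set:

# Seed: identity mapping over the low ASCII range, via a dict comprehension.
cmap = {bytes([x]): bytes([x]) for x in range(127)}

# Overwrite entries that need escaping (illustrative subset only).
for x in (0x3a, 0x2a, 0x3f):  # b':', b'*', b'?'
    cmap[bytes([x])] = b"~%02x" % x

def encode(path):
    # Translate the path byte by byte through the map.
    return b''.join(cmap[bytes([c])] for c in path)

assert encode(b"a:b") == b"a~3ab"
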
@@ -1129,7 +1129,7 b' def upgraderepo(' | |||||
1129 | """Upgrade a repository in place.""" |
|
1129 | """Upgrade a repository in place.""" | |
1130 | if optimize is None: |
|
1130 | if optimize is None: | |
1131 | optimize = [] |
|
1131 | optimize = [] | |
1132 | optimize = set(legacy_opts_map.get(o, o) for o in optimize) |
|
1132 | optimize = {legacy_opts_map.get(o, o) for o in optimize} | |
1133 | repo = repo.unfiltered() |
|
1133 | repo = repo.unfiltered() | |
1134 |
|
1134 | |||
1135 | revlogs = set(UPGRADE_ALL_REVLOGS) |
|
1135 | revlogs = set(UPGRADE_ALL_REVLOGS) |
@@ -224,13 +224,11 b' def _generic_start_transaction(handler, ' | |||||
224 |
|
224 | |||
225 |
|
225 | |||
226 | def _generic_proxytunnel(self): |
|
226 | def _generic_proxytunnel(self): | |
227 | proxyheaders = dict( |
|
227 | proxyheaders = { | |
228 | [ |
|
228 | x: self.headers[x] | |
229 | (x, self.headers[x]) |
|
229 | for x in self.headers | |
230 | for x in self.headers |
|
230 | if x.lower().startswith('proxy-') | |
231 | if x.lower().startswith('proxy-') |
|
231 | } | |
232 | ] |
|
|||
233 | ) |
|
|||
234 | self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport) |
|
232 | self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport) | |
235 | for header in pycompat.iteritems(proxyheaders): |
|
233 | for header in pycompat.iteritems(proxyheaders): | |
236 | self.send(b'%s: %s\r\n' % header) |
|
234 | self.send(b'%s: %s\r\n' % header) |
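
The `_generic_proxytunnel` hunk collapses a `dict([...])` over tuples into one comprehension with the filter inline. A hedged imitation using an ordinary dict in place of the handler's `self.headers` object:

# Hypothetical headers standing in for self.headers.
headers = {
    'Proxy-Authorization': 'Basic xyz',
    'User-Agent': 'mercurial/proto-1.0',
    'proxy-connection': 'keep-alive',
}

# Keep only the proxy-* headers, case-insensitively, as the rewritten code does.
proxyheaders = {x: headers[x] for x in headers if x.lower().startswith('proxy-')}

assert sorted(proxyheaders) == ['Proxy-Authorization', 'proxy-connection']
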
@@ -2213,7 +2213,7 b' def fspath(name, root):' | |||||
2213 | ''' |
|
2213 | ''' | |
2214 |
|
2214 | |||
2215 | def _makefspathcacheentry(dir): |
|
2215 | def _makefspathcacheentry(dir): | |
2216 | return dict((normcase(n), n) for n in os.listdir(dir)) |
|
2216 | return {normcase(n): n for n in os.listdir(dir)} | |
2217 |
|
2217 | |||
2218 | seps = pycompat.ossep |
|
2218 | seps = pycompat.ossep | |
2219 | if pycompat.osaltsep: |
|
2219 | if pycompat.osaltsep: |
@@ -364,7 +364,7 b' def emitrevisions(' | |||||
364 | if nodesorder == b'nodes': |
|
364 | if nodesorder == b'nodes': | |
365 | revs = [frev(n) for n in nodes] |
|
365 | revs = [frev(n) for n in nodes] | |
366 | elif nodesorder == b'linear': |
|
366 | elif nodesorder == b'linear': | |
367 | revs = set(frev(n) for n in nodes) |
|
367 | revs = {frev(n) for n in nodes} | |
368 | revs = dagop.linearize(revs, store.parentrevs) |
|
368 | revs = dagop.linearize(revs, store.parentrevs) | |
369 | else: # storage and default |
|
369 | else: # storage and default | |
370 | revs = sorted(frev(n) for n in nodes) |
|
370 | revs = sorted(frev(n) for n in nodes) |
@@ -535,13 +535,11 b' def statfiles(files):' | |||||
535 | cache = dircache.get(dir, None) |
|
535 | cache = dircache.get(dir, None) | |
536 | if cache is None: |
|
536 | if cache is None: | |
537 | try: |
|
537 | try: | |
538 | dmap = dict( |
|
538 | dmap = { | |
539 | [ |
|
539 | normcase(n): s | |
540 | (normcase(n), s) |
|
540 | for n, k, s in listdir(dir, True) | |
541 | for n, k, s in listdir(dir, True) |
|
541 | if getkind(s.st_mode) in _wantedkinds | |
542 | if getkind(s.st_mode) in _wantedkinds |
|
542 | } | |
543 | ] |
|
|||
544 | ) |
|
|||
545 | except OSError as err: |
|
543 | except OSError as err: | |
546 | # Python >= 2.5 returns ENOENT and adds winerror field |
|
544 | # Python >= 2.5 returns ENOENT and adds winerror field | |
547 | # EINVAL is raised if dir is not a directory. |
|
545 | # EINVAL is raised if dir is not a directory. |
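
In `statfiles` the comprehension builds a per-directory cache of normalized name to stat result. Mercurial's `listdir` and `normcase` come from its osutil and encoding layers; this sketch approximates the same shape with `os.scandir` and `str.lower`, so it is an analogy, not the real implementation:

import os
import stat

_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}  # regular files and symlinks

def makedirmap(path):
    # Normalized name -> stat result, filtered to the wanted file kinds;
    # os.scandir/str.lower stand in for hg's listdir/normcase.
    return {
        e.name.lower(): e.stat(follow_symlinks=False)
        for e in os.scandir(path)
        if stat.S_IFMT(e.stat(follow_symlinks=False).st_mode) in _wantedkinds
    }

print(len(makedirmap('.')), 'entries cached')
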
@@ -383,8 +383,8 b' def supportedcompengines(ui, role):' | |||||
383 | # reason for it (like server operators wanting to achieve specific |
|
383 | # reason for it (like server operators wanting to achieve specific | |
384 | # performance characteristics). So fail fast if the config references |
|
384 | # performance characteristics). So fail fast if the config references | |
385 | # unusable compression engines. |
|
385 | # unusable compression engines. | |
386 | validnames = set(e.name() for e in compengines) |
|
386 | validnames = {e.name() for e in compengines} | |
387 | invalidnames = set(e for e in configengines if e not in validnames) |
|
387 | invalidnames = {e for e in configengines if e not in validnames} | |
388 | if invalidnames: |
|
388 | if invalidnames: | |
389 | raise error.Abort( |
|
389 | raise error.Abort( | |
390 | _(b'invalid compression engine defined in %s: %s') |
|
390 | _(b'invalid compression engine defined in %s: %s') |
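
The compression-engine check is a set-difference validation: build the set of valid names once, then flag anything configured outside it, with cheap membership tests. A tiny standalone sketch of the same pattern (engine names invented):

validnames = {'zlib', 'zstd', 'none'}       # engines this build supports
configengines = ['zstd', 'snappy', 'lz4']   # hypothetical config values

invalidnames = {e for e in configengines if e not in validnames}
if invalidnames:
    # Mercurial raises error.Abort here; a plain print keeps the sketch runnable.
    print('invalid compression engines:', ', '.join(sorted(invalidnames)))
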
@@ -935,11 +935,11 b' class hgbuilddoc(Command):' | |||||
935 | normalizecrlf('doc/%s.html' % root) |
|
935 | normalizecrlf('doc/%s.html' % root) | |
936 |
|
936 | |||
937 | # This logic is duplicated in doc/Makefile. |
|
937 | # This logic is duplicated in doc/Makefile. | |
938 | sources = set( |
|
938 | sources = { | |
939 | f |
|
939 | f | |
940 | for f in os.listdir('mercurial/helptext') |
|
940 | for f in os.listdir('mercurial/helptext') | |
941 | if re.search(r'[0-9]\.txt$', f) |
|
941 | if re.search(r'[0-9]\.txt$', f) | |
942 | ) |
|
942 | } | |
943 |
|
943 | |||
944 | # common.txt is a one-off. |
|
944 | # common.txt is a one-off. | |
945 | gentxt('common') |
|
945 | gentxt('common') | |
@@ -979,7 +979,7 b' class hginstall(install):' | |||||
979 | # Screen out egg related commands to prevent egg generation. But allow |
|
979 | # Screen out egg related commands to prevent egg generation. But allow | |
980 | # mercurial.egg-info generation, since that is part of modern |
|
980 | # mercurial.egg-info generation, since that is part of modern | |
981 | # packaging. |
|
981 | # packaging. | |
982 | excl = set(['bdist_egg']) |
|
982 | excl = {'bdist_egg'} | |
983 | return filter(lambda x: x not in excl, install.get_sub_commands(self)) |
|
983 | return filter(lambda x: x not in excl, install.get_sub_commands(self)) | |
984 |
|
984 | |||
985 |
|
985 |
@@ -1553,7 +1553,7 b' class TTest(Test):' | |||||
1553 | NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search |
|
1553 | NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search | |
1554 |
|
1554 | |||
1555 | ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub |
|
1555 | ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub | |
1556 | ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256)) |
|
1556 | ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)} | |
1557 | ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'}) |
|
1557 | ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'}) | |
1558 |
|
1558 | |||
1559 | def __init__(self, path, *args, **kwds): |
|
1559 | def __init__(self, path, *args, **kwds): | |
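
`ESCAPEMAP` is a full 256-entry escape table seeded by a comprehension and then patched for backslash and carriage return. A compact standalone sketch of the construction and the substitution it backs, with a local `bchr` helper as in the test runner:

import re

def bchr(i):
    # Byte value -> single-byte bytestring (Python 3).
    return bytes([i])

# One escape string per byte value, then two special-cased overrides.
ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})

ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub

def escape(s):
    # Replace each matched byte with its ESCAPEMAP entry.
    return ESCAPESUB(lambda m: ESCAPEMAP[m.group(0)], s)

assert escape(b'ok\x07\\') == br'ok\x07\\'
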
@@ -2524,7 +2524,7 b' def loadtimes(outputdir):' | |||||
2524 | def savetimes(outputdir, result): |
|
2524 | def savetimes(outputdir, result): | |
2525 | saved = dict(loadtimes(outputdir)) |
|
2525 | saved = dict(loadtimes(outputdir)) | |
2526 | maxruns = 5 |
|
2526 | maxruns = 5 | |
2527 | skipped = set(str(t[0]) for t in result.skipped) |
|
2527 | skipped = {str(t[0]) for t in result.skipped} | |
2528 | for tdata in result.times: |
|
2528 | for tdata in result.times: | |
2529 | test, real = tdata[0], tdata[3] |
|
2529 | test, real = tdata[0], tdata[3] | |
2530 | if test not in skipped: |
|
2530 | if test not in skipped: | |
@@ -2737,7 +2737,7 b' class TextTestRunner(unittest.TextTestRu' | |||||
2737 | @staticmethod |
|
2737 | @staticmethod | |
2738 | def _writexunit(result, outf): |
|
2738 | def _writexunit(result, outf): | |
2739 | # See http://llg.cubic.org/docs/junit/ for a reference. |
|
2739 | # See http://llg.cubic.org/docs/junit/ for a reference. | |
2740 | timesd = dict((t[0], t[3]) for t in result.times) |
|
2740 | timesd = {t[0]: t[3] for t in result.times} | |
2741 | doc = minidom.Document() |
|
2741 | doc = minidom.Document() | |
2742 | s = doc.createElement('testsuite') |
|
2742 | s = doc.createElement('testsuite') | |
2743 | s.setAttribute('errors', "0") # TODO |
|
2743 | s.setAttribute('errors', "0") # TODO | |
@@ -3343,7 +3343,7 b' class TestRunner(object):' | |||||
3343 | tmpdir = os.path.join(self._hgtmp, b'child%d' % count) |
|
3343 | tmpdir = os.path.join(self._hgtmp, b'child%d' % count) | |
3344 |
|
3344 | |||
3345 | # extra keyword parameters. 'case' is used by .t tests |
|
3345 | # extra keyword parameters. 'case' is used by .t tests | |
3346 | kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc) |
|
3346 | kwds = {k: testdesc[k] for k in ['case'] if k in testdesc} | |
3347 |
|
3347 | |||
3348 | t = testcls( |
|
3348 | t = testcls( | |
3349 | refpath, |
|
3349 | refpath, |
@@ -588,7 +588,7 b' class filestorage(object):' | |||||
588 | start = nullid |
|
588 | start = nullid | |
589 | if stop is None: |
|
589 | if stop is None: | |
590 | stop = [] |
|
590 | stop = [] | |
591 | stoprevs = set(self.rev(n) for n in stop) |
|
591 | stoprevs = {self.rev(n) for n in stop} | |
592 | startrev = self.rev(start) |
|
592 | startrev = self.rev(start) | |
593 | reachable = {startrev} |
|
593 | reachable = {startrev} | |
594 | heads = {startrev} |
|
594 | heads = {startrev} |
@@ -165,7 +165,7 b' def testcopyfrom():' | |||||
165 | rm2.flush() |
|
165 | rm2.flush() | |
166 |
|
166 | |||
167 | # two files should be the same |
|
167 | # two files should be the same | |
168 | ensure(len(set(util.readfile(p) for p in [path, path2])) == 1) |
|
168 | ensure(len({util.readfile(p) for p in [path, path2]}) == 1) | |
169 |
|
169 | |||
170 | os.unlink(path) |
|
170 | os.unlink(path) | |
171 | os.unlink(path2) |
|
171 | os.unlink(path2) |
@@ -52,7 +52,7 b' def test(auth, urls=None):' | |||||
52 | for name in (b'.username', b'.password'): |
|
52 | for name in (b'.username', b'.password'): | |
53 | if (p + name) not in auth: |
|
53 | if (p + name) not in auth: | |
54 | auth[p + name] = p |
|
54 | auth[p + name] = p | |
55 | auth = dict((k, v) for k, v in auth.items() if v is not None) |
|
55 | auth = {k: v for k, v in auth.items() if v is not None} | |
56 |
|
56 | |||
57 | ui = writeauth(auth) |
|
57 | ui = writeauth(auth) | |
58 |
|
58 |
@@ -229,7 +229,7 b' def genbits(n):' | |||||
229 |
|
229 | |||
230 | # Gray Code. See https://en.wikipedia.org/wiki/Gray_code |
|
230 | # Gray Code. See https://en.wikipedia.org/wiki/Gray_code | |
231 | gray = lambda x: x ^ (x >> 1) |
|
231 | gray = lambda x: x ^ (x >> 1) | |
232 | reversegray = dict((gray(i), i) for i in range(m)) |
|
232 | reversegray = {gray(i): i for i in range(m)} | |
233 |
|
233 | |||
234 | # Generate (n * 2) bit gray code, yield lower n bits as X, and look for |
|
234 | # Generate (n * 2) bit gray code, yield lower n bits as X, and look for | |
235 | # the next unused gray code where higher n bits equal to X. |
|
235 | # the next unused gray code where higher n bits equal to X. |
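
Here the comprehension inverts a function: `reversegray` maps each Gray code back to its index. A self-contained check that the table really inverts `gray` over the whole range; `n` is an assumed small width and `m = 2 ** n` follows the power-of-two table-size convention in `genbits`:

# Gray code: consecutive integers differ in exactly one bit.
gray = lambda x: x ^ (x >> 1)

n = 4
m = 2 ** n  # assumed table size, a power of two as in genbits()

# Invert gray() by comprehension: code -> original index.
reversegray = {gray(i): i for i in range(m)}

# gray() is a bijection on range(2 ** n), so every code inverts cleanly.
assert len(reversegray) == m
assert all(reversegray[gray(i)] == i for i in range(m))
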
@@ -116,7 +116,7 b' class memorycacher(object):' | |||||
116 | redirectable = False |
|
116 | redirectable = False | |
117 | else: |
|
117 | else: | |
118 | clienttargets = set(self.redirecttargets) |
|
118 | clienttargets = set(self.redirecttargets) | |
119 | ourtargets = set(t[b'name'] for t in loadredirecttargets(self.ui)) |
|
119 | ourtargets = {t[b'name'] for t in loadredirecttargets(self.ui)} | |
120 |
|
120 | |||
121 | # We only ever redirect to a single target (for now). So we don't |
|
121 | # We only ever redirect to a single target (for now). So we don't | |
122 | # need to store which target matched. |
|
122 | # need to store which target matched. |
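
The last hunk builds the server's redirect-target names as a set so membership tests against what the client advertised stay cheap. A hedged sketch of that negotiation step, with invented target records shaped like the `b'name'` dicts above:

# Hypothetical redirect target records, as loadredirecttargets() might yield.
loadedtargets = [{b'name': b'cdn-us'}, {b'name': b'cdn-eu'}]
clienttargets = {b'cdn-eu', b'cdn-asia'}  # targets the client said it supports

ourtargets = {t[b'name'] for t in loadedtargets}

# We only ever redirect to a single target (for now), so any overlap will do.
common = clienttargets & ourtargets
redirecttarget = next(iter(common), None)
assert redirecttarget == b'cdn-eu'
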