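This section of the series applies one mechanical change throughout the tree: Mercurial's py2/py3 shim call pycompat.iteritems(x) is replaced by the plain dict method x.items(), and modules that no longer need the shim drop their pycompat import. A minimal, self-contained sketch of the pattern (the pycompat class below is an illustrative stand-in for mercurial.pycompat, not the real module; on Python 3 the shim already just returned d.items()):

    class pycompat:  # stand-in for mercurial.pycompat (illustrative only)
        @staticmethod
        def iteritems(d):
            return d.items()

    bookmarks = {b'default': b'abc123', b'stable': b'def456'}

    # Before: every call site went through the compatibility shim.
    old_style = sorted(pycompat.iteritems(bookmarks))

    # After: the dict method, called directly.
    new_style = sorted(bookmarks.items())

    assert old_style == new_style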
@@ -733,7 +733,7 b' class fixupstate(object):' | |||
|
733 | 733 | |
|
734 | 734 | def apply(self): |
|
735 | 735 | """apply fixups to individual filefixupstates""" |
|
736 | for path, state in pycompat.iteritems(self.fixupmap): |
|
736 | for path, state in self.fixupmap.items(): | |
|
737 | 737 | if self.ui.debugflag: |
|
738 | 738 | self.ui.write(_(b'applying fixups to %s\n') % path) |
|
739 | 739 | state.apply() |
@@ -741,10 +741,7 b' class fixupstate(object):' | |||
|
741 | 741 | @property |
|
742 | 742 | def chunkstats(self): |
|
743 | 743 | """-> {path: chunkstats}. collect chunkstats from filefixupstates""" |
|
744 | return { | |
|
745 | path: state.chunkstats | |
|
746 | for path, state in pycompat.iteritems(self.fixupmap) | |
|
747 | } | |
|
744 | return {path: state.chunkstats for path, state in self.fixupmap.items()} | |
|
748 | 745 | |
|
749 | 746 | def commit(self): |
|
750 | 747 | """commit changes. update self.finalnode, self.replacemap""" |
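The hunk above also shows the other recurring change in this series: once the shim call is gone, many comprehensions fit on one line and the code formatter collapses them. A toy before/after (fixupmap here is sample data, not the real filefixupstate mapping):

    fixupmap = {b'a.txt': (3, 4), b'b.txt': (1, 1)}  # illustrative data

    # Before: a multi-line comprehension wrapping pycompat.iteritems(...)
    chunkstats_old = {
        path: stats
        for path, stats in fixupmap.items()  # was pycompat.iteritems(fixupmap)
    }

    # After: short enough for a single line
    chunkstats_new = {path: stats for path, stats in fixupmap.items()}

    assert chunkstats_old == chunkstats_new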
@@ -762,7 +759,7 b' class fixupstate(object):' | |||
|
762 | 759 | chunkstats = self.chunkstats |
|
763 | 760 | if ui.verbose: |
|
764 | 761 | # chunkstats for each file |
|
765 | for path, stat in pycompat.iteritems(chunkstats): |
|
762 | for path, stat in chunkstats.items(): | |
|
766 | 763 | if stat[0]: |
|
767 | 764 | ui.write( |
|
768 | 765 | _(b'%s: %d of %d chunk(s) applied\n') |
@@ -845,7 +842,7 b' class fixupstate(object):' | |||
|
845 | 842 | repo = self.repo |
|
846 | 843 | needupdate = [ |
|
847 | 844 | (name, self.replacemap[hsh]) |
|
848 | for name, hsh in pycompat.iteritems(repo._bookmarks) |
|
845 | for name, hsh in repo._bookmarks.items() | |
|
849 | 846 | if hsh in self.replacemap |
|
850 | 847 | ] |
|
851 | 848 | changes = [] |
@@ -908,7 +905,7 b' class fixupstate(object):' | |||
|
908 | 905 | # ctx changes more files (not a subset of memworkingcopy) |
|
909 | 906 | if not set(ctx.files()).issubset(set(memworkingcopy)): |
|
910 | 907 | return False |
|
911 | for path, content in pycompat.iteritems(memworkingcopy): |
|
908 | for path, content in memworkingcopy.items(): | |
|
912 | 909 | if path not in pctx or path not in ctx: |
|
913 | 910 | return False |
|
914 | 911 | fctx = ctx[path] |
@@ -951,7 +948,7 b' class fixupstate(object):' | |||
|
951 | 948 | def _cleanupoldcommits(self): |
|
952 | 949 | replacements = { |
|
953 | 950 | k: ([v] if v is not None else []) |
|
954 | for k, v in pycompat.iteritems(self.replacemap) |
|
951 | for k, v in self.replacemap.items() | |
|
955 | 952 | } |
|
956 | 953 | if replacements: |
|
957 | 954 | scmutil.cleanupnodes( |
@@ -1001,7 +998,7 b' def overlaydiffcontext(ctx, chunks):' | |||
|
1001 | 998 | if not path or not info: |
|
1002 | 999 | continue |
|
1003 | 1000 | patchmap[path].append(info) |
|
1004 | for path, patches in pycompat.iteritems(patchmap): |
|
1001 | for path, patches in patchmap.items(): | |
|
1005 | 1002 | if path not in ctx or not patches: |
|
1006 | 1003 | continue |
|
1007 | 1004 | patches.sort(reverse=True) |
@@ -15,7 +15,6 b' from mercurial.i18n import _' | |||
|
15 | 15 | from mercurial import ( |
|
16 | 16 | demandimport, |
|
17 | 17 | error, |
|
18 | pycompat, | |
|
19 | 18 | util, |
|
20 | 19 | ) |
|
21 | 20 | from . import common |
@@ -209,7 +208,7 b' class bzr_source(common.converter_source' | |||
|
209 | 208 | if not branch.supports_tags(): |
|
210 | 209 | return {} |
|
211 | 210 | tagdict = branch.tags.get_tag_dict() |
|
212 | for name, rev in pycompat.iteritems(tagdict): |
|
211 | for name, rev in tagdict.items(): | |
|
213 | 212 | bytetags[self.recode(name)] = rev |
|
214 | 213 | return bytetags |
|
215 | 214 |
@@ -417,7 +417,7 b' class commandline(object):' | |||
|
417 | 417 | def _cmdline(self, cmd, *args, **kwargs): |
|
418 | 418 | kwargs = pycompat.byteskwargs(kwargs) |
|
419 | 419 | cmdline = [self.command, cmd] + list(args) |
|
420 | for k, v in pycompat.iteritems(kwargs): |
|
420 | for k, v in kwargs.items(): | |
|
421 | 421 | if len(k) == 1: |
|
422 | 422 | cmdline.append(b'-' + k) |
|
423 | 423 | else: |
@@ -584,9 +584,7 b' class converter(object):' | |||
|
584 | 584 | # write another hash correspondence to override the |
|
585 | 585 | # previous one so we don't end up with extra tag heads |
|
586 | 586 | tagsparents = [ |
|
587 | e | |
|
588 | for e in pycompat.iteritems(self.map) | |
|
589 | if e[1] == tagsparent | |
|
587 | e for e in self.map.items() if e[1] == tagsparent | |
|
590 | 588 | ] |
|
591 | 589 | if tagsparents: |
|
592 | 590 | self.map[tagsparents[0][0]] = nrev |
@@ -466,7 +466,7 b' def createlog(ui, directory=None, root=b' | |||
|
466 | 466 | |
|
467 | 467 | # find the branches starting from this revision |
|
468 | 468 | branchpoints = set() |
|
469 | for branch, revision in pycompat.iteritems(branchmap): |
|
469 | for branch, revision in branchmap.items(): | |
|
470 | 470 | revparts = tuple([int(i) for i in revision.split(b'.')]) |
|
471 | 471 | if len(revparts) < 2: # bad tags |
|
472 | 472 | continue |
@@ -125,7 +125,7 b' class filemapper(object):' | |||
|
125 | 125 | repo belong to the source repo and what parts don't.""" |
|
126 | 126 | if self.targetprefixes is None: |
|
127 | 127 | self.targetprefixes = set() |
|
128 | for before, after in pycompat.iteritems(self.rename): |
|
128 | for before, after in self.rename.items(): | |
|
129 | 129 | self.targetprefixes.add(after) |
|
130 | 130 | |
|
131 | 131 | # If "." is a target, then all target files are considered from the |
@@ -138,7 +138,7 b' class mercurial_sink(common.converter_si' | |||
|
138 | 138 | |
|
139 | 139 | if missings: |
|
140 | 140 | self.after() |
|
141 | for pbranch, heads in sorted(pycompat.iteritems(missings)): |
|
141 | for pbranch, heads in sorted(missings.items()): | |
|
142 | 142 | pbranchpath = os.path.join(self.path, pbranch) |
|
143 | 143 | prepo = hg.peer(self.ui, {}, pbranchpath) |
|
144 | 144 | self.ui.note( |
@@ -595,7 +595,7 b' class mercurial_source(common.converter_' | |||
|
595 | 595 | maappend = ma.append |
|
596 | 596 | rappend = r.append |
|
597 | 597 | d = ctx1.manifest().diff(ctx2.manifest()) |
|
598 | for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d): |
|
598 | for f, ((node1, flag1), (node2, flag2)) in d.items(): | |
|
599 | 599 | if node2 is None: |
|
600 | 600 | rappend(f) |
|
601 | 601 | else: |
@@ -621,7 +621,7 b' class mercurial_source(common.converter_' | |||
|
621 | 621 | cleanp2 = set() |
|
622 | 622 | if len(parents) == 2: |
|
623 | 623 | d = parents[1].manifest().diff(ctx.manifest(), clean=True) |
|
624 | for f, value in pycompat.iteritems(d): |
|
624 | for f, value in d.items(): | |
|
625 | 625 | if value is None: |
|
626 | 626 | cleanp2.add(f) |
|
627 | 627 | changes = [(f, rev) for f in files if f not in self.ignored] |
@@ -102,7 +102,7 b' class monotone_source(common.converter_s' | |||
|
102 | 102 | # Prepare the command in automate stdio format |
|
103 | 103 | kwargs = pycompat.byteskwargs(kwargs) |
|
104 | 104 | command = [] |
|
105 | for k, v in pycompat.iteritems(kwargs): |
|
105 | for k, v in kwargs.items(): | |
|
106 | 106 | command.append(b"%d:%s" % (len(k), k)) |
|
107 | 107 | if v: |
|
108 | 108 | command.append(b"%d:%s" % (len(v), v)) |
@@ -202,7 +202,7 b' def get_log_child(' | |||
|
202 | 202 | def receiver(orig_paths, revnum, author, date, message, pool): |
|
203 | 203 | paths = {} |
|
204 | 204 | if orig_paths is not None: |
|
205 | for k, v in pycompat.iteritems(orig_paths): |
|
205 | for k, v in orig_paths.items(): | |
|
206 | 206 | paths[k] = changedpath(v) |
|
207 | 207 | pickle.dump((paths, revnum, author, date, message), fp, protocol) |
|
208 | 208 | |
@@ -297,7 +297,7 b' class directlogstream(list):' | |||
|
297 | 297 | def receiver(orig_paths, revnum, author, date, message, pool): |
|
298 | 298 | paths = {} |
|
299 | 299 | if orig_paths is not None: |
|
300 | for k, v in pycompat.iteritems(orig_paths): |
|
300 | for k, v in orig_paths.items(): | |
|
301 | 301 | paths[k] = changedpath(v) |
|
302 | 302 | self.append((paths, revnum, author, date, message)) |
|
303 | 303 | |
@@ -729,7 +729,7 b' class svn_source(converter_source):' | |||
|
729 | 729 | ) |
|
730 | 730 | files = [ |
|
731 | 731 | n |
|
732 | for n, e in pycompat.iteritems(entries) |
|
732 | for n, e in entries.items() | |
|
733 | 733 | if e.kind == svn.core.svn_node_file |
|
734 | 734 | ] |
|
735 | 735 | self.removed = set() |
@@ -819,7 +819,7 b' class svn_source(converter_source):' | |||
|
819 | 819 | origpaths = [] |
|
820 | 820 | copies = [ |
|
821 | 821 | (e.copyfrom_path, e.copyfrom_rev, p) |
|
822 | for p, e in pycompat.iteritems(origpaths) |
|
822 | for p, e in origpaths.items() | |
|
823 | 823 | if e.copyfrom_path |
|
824 | 824 | ] |
|
825 | 825 | # Apply moves/copies from more specific to general |
@@ -850,7 +850,7 b' class svn_source(converter_source):' | |||
|
850 | 850 | # be represented in mercurial. |
|
851 | 851 | addeds = { |
|
852 | 852 | p: e.copyfrom_path |
|
853 | for p, e in pycompat.iteritems(origpaths) |
|
853 | for p, e in origpaths.items() | |
|
854 | 854 | if e.action == b'A' and e.copyfrom_path |
|
855 | 855 | } |
|
856 | 856 | badroots = set() |
@@ -1139,7 +1139,7 b' class svn_source(converter_source):' | |||
|
1139 | 1139 | parents = [] |
|
1140 | 1140 | # check whether this revision is the start of a branch or part |
|
1141 | 1141 | # of a branch renaming |
|
1142 | orig_paths = sorted(pycompat.iteritems(orig_paths)) |
|
1142 | orig_paths = sorted(orig_paths.items()) | |
|
1143 | 1143 | root_paths = [ |
|
1144 | 1144 | (p, e) for p, e in orig_paths if self.module.startswith(p) |
|
1145 | 1145 | ] |
@@ -1301,7 +1301,7 b' class svn_source(converter_source):' | |||
|
1301 | 1301 | path += b'/' |
|
1302 | 1302 | return ( |
|
1303 | 1303 | (path + p) |
|
1304 | for p, e in pycompat.iteritems(entries) |
|
1304 | for p, e in entries.items() | |
|
1305 | 1305 | if e.kind == svn.core.svn_node_file |
|
1306 | 1306 | ) |
|
1307 | 1307 |
@@ -378,7 +378,7 b' def reposetup(ui, repo):' | |||
|
378 | 378 | |
|
379 | 379 | if not repo.local(): |
|
380 | 380 | return |
|
381 | for name, fn in pycompat.iteritems(filters): |
|
381 | for name, fn in filters.items(): | |
|
382 | 382 | repo.adddatafilter(name, fn) |
|
383 | 383 | |
|
384 | 384 | ui.setconfig(b'patch', b'eol', b'auto', b'eol') |
@@ -174,7 +174,7 b' class annotateopts(object):' | |||
|
174 | 174 | |
|
175 | 175 | def __init__(self, **opts): |
|
176 | 176 | opts = pycompat.byteskwargs(opts) |
|
177 | for k, v in pycompat.iteritems(self.defaults): |
|
177 | for k, v in self.defaults.items(): | |
|
178 | 178 | setattr(self, k, opts.get(k, v)) |
|
179 | 179 | |
|
180 | 180 | @util.propertycache |
@@ -583,7 +583,7 b' class _annotatecontext(object):' | |||
|
583 | 583 | # find an unresolved line and its linelog rev to annotate |
|
584 | 584 | hsh = None |
|
585 | 585 | try: |
|
586 | for (rev, _linenum), idxs in pycompat.iteritems(key2idxs): |
|
586 | for (rev, _linenum), idxs in key2idxs.items(): | |
|
587 | 587 | if revmap.rev2flag(rev) & revmapmod.sidebranchflag: |
|
588 | 588 | continue |
|
589 | 589 | hsh = annotateresult[idxs[0]][0] |
@@ -594,7 +594,7 b' class _annotatecontext(object):' | |||
|
594 | 594 | # the remaining key2idxs are not in main branch, resolving them |
|
595 | 595 | # using the hard way... |
|
596 | 596 | revlines = {} |
|
597 | for (rev, linenum), idxs in pycompat.iteritems(key2idxs): |
|
597 | for (rev, linenum), idxs in key2idxs.items(): | |
|
598 | 598 | if rev not in revlines: |
|
599 | 599 | hsh = annotateresult[idxs[0]][0] |
|
600 | 600 | if self.ui.debugflag: |
@@ -14,7 +14,6 b' from mercurial import (' | |||
|
14 | 14 | error, |
|
15 | 15 | extensions, |
|
16 | 16 | hg, |
|
17 | pycompat, | |
|
18 | 17 | util, |
|
19 | 18 | wireprotov1peer, |
|
20 | 19 | wireprotov1server, |
@@ -189,7 +188,7 b' def clientfetch(repo, paths, lastnodemap' | |||
|
189 | 188 | for result in results: |
|
190 | 189 | r = result.result() |
|
191 | 190 | # TODO: pconvert these paths on the server? |
|
192 | r = {util.pconvert(p): v for p, v in pycompat.iteritems(r)} |
|
191 | r = {util.pconvert(p): v for p, v in r.items()} | |
|
193 | 192 | for path in sorted(r): |
|
194 | 193 | # ignore malicious paths |
|
195 | 194 | if not path.startswith(b'fastannotate/') or b'/../' in ( |
@@ -377,9 +377,7 b' def cleanup(repo, replacements, wdirwrit' | |||
|
377 | 377 | Useful as a hook point for extending "hg fix" with output summarizing the |
|
378 | 378 | effects of the command, though we choose not to output anything here. |
|
379 | 379 | """ |
|
380 | replacements = { | |
|
381 | prec: [succ] for prec, succ in pycompat.iteritems(replacements) | |
|
382 | } | |
|
380 | replacements = {prec: [succ] for prec, succ in replacements.items()} | |
|
383 | 381 | scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True) |
|
384 | 382 | |
|
385 | 383 | |
@@ -692,7 +690,7 b' def fixfile(ui, repo, opts, fixers, fixc' | |||
|
692 | 690 | """ |
|
693 | 691 | metadata = {} |
|
694 | 692 | newdata = fixctx[path].data() |
|
695 | for fixername, fixer in pycompat.iteritems(fixers): |
|
693 | for fixername, fixer in fixers.items(): | |
|
696 | 694 | if fixer.affects(opts, fixctx, path): |
|
697 | 695 | ranges = lineranges( |
|
698 | 696 | opts, path, basepaths, basectxs, fixctx, newdata |
@@ -770,7 +768,7 b' def writeworkingdir(repo, ctx, filedata,' | |||
|
770 | 768 | |
|
771 | 769 | Directly updates the dirstate for the affected files. |
|
772 | 770 | """ |
|
773 | for path, data in pycompat.iteritems(filedata): |
|
771 | for path, data in filedata.items(): | |
|
774 | 772 | fctx = ctx[path] |
|
775 | 773 | fctx.write(data, fctx.flags()) |
|
776 | 774 |
@@ -501,15 +501,11 b' def overridewalk(orig, self, match, subr' | |||
|
501 | 501 | visit.update(f for f in copymap if f not in results and matchfn(f)) |
|
502 | 502 | else: |
|
503 | 503 | if matchalways: |
|
504 | visit.update( | |
|
505 | f for f, st in pycompat.iteritems(dmap) if f not in results | |
|
506 | ) | |
|
504 | visit.update(f for f, st in dmap.items() if f not in results) | |
|
507 | 505 | visit.update(f for f in copymap if f not in results) |
|
508 | 506 | else: |
|
509 | 507 | visit.update( |
|
510 | f | |
|
511 | for f, st in pycompat.iteritems(dmap) | |
|
512 | if f not in results and matchfn(f) | |
|
508 | f for f, st in dmap.items() if f not in results and matchfn(f) | |
|
513 | 509 | ) |
|
514 | 510 | visit.update(f for f in copymap if f not in results and matchfn(f)) |
|
515 | 511 |
@@ -115,7 +115,7 b' def parseoptions(ui, cmdoptions, args):' | |||
|
115 | 115 | opts = dict( |
|
116 | 116 | [ |
|
117 | 117 | (k, convert(v)) if isinstance(v, bytes) else (k, v) |
|
118 | for k, v in pycompat.iteritems(opts) |
|
118 | for k, v in opts.items() | |
|
119 | 119 | ] |
|
120 | 120 | ) |
|
121 | 121 | |
@@ -131,7 +131,7 b' class Command(object):' | |||
|
131 | 131 | def __bytes__(self): |
|
132 | 132 | cmd = b"hg " + self.name |
|
133 | 133 | if self.opts: |
|
134 | for k, values in sorted(pycompat.iteritems(self.opts)): |
|
134 | for k, values in sorted(self.opts.items()): | |
|
135 | 135 | for v in values: |
|
136 | 136 | if v: |
|
137 | 137 | if isinstance(v, int): |
@@ -376,9 +376,7 b' def view(ui, repo, *etc, **opts):' | |||
|
376 | 376 | """start interactive history viewer""" |
|
377 | 377 | opts = pycompat.byteskwargs(opts) |
|
378 | 378 | os.chdir(repo.root) |
|
379 | optstr = b' '.join( | |
|
380 | [b'--%s %s' % (k, v) for k, v in pycompat.iteritems(opts) if v] | |
|
381 | ) | |
|
379 | optstr = b' '.join([b'--%s %s' % (k, v) for k, v in opts.items() if v]) | |
|
382 | 380 | if repo.filtername is None: |
|
383 | 381 | optstr += b'--hidden' |
|
384 | 382 |
@@ -2101,7 +2101,7 b' def _finishhistedit(ui, repo, state, fm)' | |||
|
2101 | 2101 | |
|
2102 | 2102 | mapping, tmpnodes, created, ntm = processreplacement(state) |
|
2103 | 2103 | if mapping: |
|
2104 | for prec, succs in pycompat.iteritems(mapping): |
|
2104 | for prec, succs in mapping.items(): | |
|
2105 | 2105 | if not succs: |
|
2106 | 2106 | ui.debug(b'histedit: %s is dropped\n' % short(prec)) |
|
2107 | 2107 | else: |
@@ -2139,7 +2139,7 b' def _finishhistedit(ui, repo, state, fm)' | |||
|
2139 | 2139 | nodechanges = fd( |
|
2140 | 2140 | { |
|
2141 | 2141 | hf(oldn): fl([hf(n) for n in newn], name=b'node') |
|
2142 | for oldn, newn in pycompat.iteritems(mapping) |
|
2142 | for oldn, newn in mapping.items() | |
|
2143 | 2143 | }, |
|
2144 | 2144 | key=b"oldnode", |
|
2145 | 2145 | value=b"newnodes", |
@@ -2387,7 +2387,7 b' def ruleeditor(repo, ui, actions, editco' | |||
|
2387 | 2387 | tsum = summary[len(fword) + 1 :].lstrip() |
|
2388 | 2388 | # safe but slow: reverse iterate over the actions so we |
|
2389 | 2389 | # don't clash on two commits having the same summary |
|
2390 | for na, l in reversed(list(pycompat.iteritems(newact))): |
|
2390 | for na, l in reversed(list(newact.items())): | |
|
2391 | 2391 | actx = repo[na.node] |
|
2392 | 2392 | asum = _getsummary(actx) |
|
2393 | 2393 | if asum == tsum: |
@@ -2400,7 +2400,7 b' def ruleeditor(repo, ui, actions, editco' | |||
|
2400 | 2400 | |
|
2401 | 2401 | # copy over and flatten the new list |
|
2402 | 2402 | actions = [] |
|
2403 | for na, l in pycompat.iteritems(newact): |
|
2403 | for na, l in newact.items(): | |
|
2404 | 2404 | actions.append(na) |
|
2405 | 2405 | actions += l |
|
2406 | 2406 |
@@ -419,7 +419,7 b' def localrepolistkeys(orig, self, namesp' | |||
|
419 | 419 | if pattern.endswith(b'*'): |
|
420 | 420 | pattern = b're:^' + pattern[:-1] + b'.*' |
|
421 | 421 | kind, pat, matcher = stringutil.stringmatcher(pattern) |
|
422 | for bookmark, node in pycompat.iteritems(bookmarks): |
|
422 | for bookmark, node in bookmarks.items(): | |
|
423 | 423 | if matcher(bookmark): |
|
424 | 424 | results[bookmark] = node |
|
425 | 425 | return results |
@@ -542,7 +542,7 b' def _generateoutputparts(head, bundlerep' | |||
|
542 | 542 | if part.type == b'changegroup': |
|
543 | 543 | haschangegroup = True |
|
544 | 544 | newpart = bundle2.bundlepart(part.type, data=part.read()) |
|
545 | for key, value in pycompat.iteritems(part.params): |
|
545 | for key, value in part.params.items(): | |
|
546 | 546 | newpart.addparam(key, value) |
|
547 | 547 | parts.append(newpart) |
|
548 | 548 | |
@@ -794,7 +794,7 b' def _saveremotebookmarks(repo, newbookma' | |||
|
794 | 794 | # saveremotenames expects 20 byte binary nodes for branches |
|
795 | 795 | branches[rname].append(bin(hexnode)) |
|
796 | 796 | |
|
797 | for bookmark, hexnode in pycompat.iteritems(newbookmarks): |
|
797 | for bookmark, hexnode in newbookmarks.items(): | |
|
798 | 798 | bookmarks[bookmark] = hexnode |
|
799 | 799 | remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks) |
|
800 | 800 | |
@@ -804,7 +804,7 b' def _savelocalbookmarks(repo, bookmarks)' | |||
|
804 | 804 | return |
|
805 | 805 | with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr: |
|
806 | 806 | changes = [] |
|
807 | for scratchbook, node in pycompat.iteritems(bookmarks): |
|
807 | for scratchbook, node in bookmarks.items(): | |
|
808 | 808 | changectx = repo[node] |
|
809 | 809 | changes.append((scratchbook, changectx.node())) |
|
810 | 810 | repo._bookmarks.applychanges(repo, tr, changes) |
@@ -1045,7 +1045,7 b' def storetobundlestore(orig, repo, op, u' | |||
|
1045 | 1045 | bundle2._processpart(op, part) |
|
1046 | 1046 | else: |
|
1047 | 1047 | bundlepart = bundle2.bundlepart(part.type, data=part.read()) |
|
1048 | for key, value in pycompat.iteritems(part.params): |
|
1048 | for key, value in part.params.items(): | |
|
1049 | 1049 | bundlepart.addparam(key, value) |
|
1050 | 1050 | |
|
1051 | 1051 | # Certain parts require a response |
@@ -1137,7 +1137,7 b' def processparts(orig, repo, op, unbundl' | |||
|
1137 | 1137 | # differs from previous behavior, we need to put it behind a |
|
1138 | 1138 | # config flag for incremental rollout. |
|
1139 | 1139 | bundlepart = bundle2.bundlepart(part.type, data=part.read()) |
|
1140 | for key, value in pycompat.iteritems(part.params): |
|
1140 | for key, value in part.params.items(): | |
|
1141 | 1141 | bundlepart.addparam(key, value) |
|
1142 | 1142 | |
|
1143 | 1143 | # Certain parts require a response |
@@ -1323,9 +1323,7 b' def _maybeaddpushbackpart(op, bookmark, ' | |||
|
1323 | 1323 | b'new': newnode, |
|
1324 | 1324 | b'old': oldnode, |
|
1325 | 1325 | } |
|
1326 | op.reply.newpart( | |
|
1327 | b'pushkey', mandatoryparams=pycompat.iteritems(params) | |
|
1328 | ) | |
|
1326 | op.reply.newpart(b'pushkey', mandatoryparams=params.items()) | |
|
1329 | 1327 | |
|
1330 | 1328 | |
|
1331 | 1329 | def bundle2pushkey(orig, op, part): |
@@ -12,7 +12,6 b' from mercurial import (' | |||
|
12 | 12 | changegroup, |
|
13 | 13 | error, |
|
14 | 14 | extensions, |
|
15 | pycompat, | |
|
16 | 15 | revsetlang, |
|
17 | 16 | util, |
|
18 | 17 | ) |
@@ -67,7 +66,7 b' def getscratchbranchparts(repo, peer, ou' | |||
|
67 | 66 | parts.append( |
|
68 | 67 | bundle2.bundlepart( |
|
69 | 68 | scratchbranchparttype.upper(), |
|
70 | advisoryparams=pycompat.iteritems(params), |
|
69 | advisoryparams=params.items(), | |
|
71 | 70 | data=cg, |
|
72 | 71 | ) |
|
73 | 72 | ) |
@@ -13,8 +13,6 b' import time' | |||
|
13 | 13 | import warnings |
|
14 | 14 | import mysql.connector |
|
15 | 15 | |
|
16 | from mercurial import pycompat | |
|
17 | ||
|
18 | 16 | from . import indexapi |
|
19 | 17 | |
|
20 | 18 | |
@@ -179,7 +177,7 b' class sqlindexapi(indexapi.indexapi):' | |||
|
179 | 177 | self.sqlconnect() |
|
180 | 178 | args = [] |
|
181 | 179 | values = [] |
|
182 | for bookmark, node in pycompat.iteritems(bookmarks): |
|
180 | for bookmark, node in bookmarks.items(): | |
|
183 | 181 | args.append(b'(%s, %s, %s)') |
|
184 | 182 | values.extend((bookmark, node, self.reponame)) |
|
185 | 183 | args = b','.join(args) |
@@ -128,7 +128,7 b' def recordbookmarks(orig, store, fp):' | |||
|
128 | 128 | repo = store._repo |
|
129 | 129 | if util.safehasattr(repo, 'journal'): |
|
130 | 130 | oldmarks = bookmarks.bmstore(repo) |
|
131 | for mark, value in pycompat.iteritems(store): |
|
131 | for mark, value in store.items(): | |
|
132 | 132 | oldvalue = oldmarks.get(mark, repo.nullid) |
|
133 | 133 | if value != oldvalue: |
|
134 | 134 | repo.journal.record(bookmarktype, mark, oldvalue, value) |
@@ -513,7 +513,7 b' def demo(ui, repo, *args, **opts):' | |||
|
513 | 513 | kwmaps = _defaultkwmaps(ui) |
|
514 | 514 | if uikwmaps: |
|
515 | 515 | ui.status(_(b'\tdisabling current template maps\n')) |
|
516 | for k, v in pycompat.iteritems(kwmaps): |
|
516 | for k, v in kwmaps.items(): | |
|
517 | 517 | ui.setconfig(b'keywordmaps', k, v, b'keyword') |
|
518 | 518 | else: |
|
519 | 519 | ui.status(_(b'\n\tconfiguration using current keyword template maps\n')) |
@@ -527,7 +527,7 b' def demo(ui, repo, *args, **opts):' | |||
|
527 | 527 | ui.writenoi18n(b'[extensions]\nkeyword =\n') |
|
528 | 528 | demoitems(b'keyword', ui.configitems(b'keyword')) |
|
529 | 529 | demoitems(b'keywordset', ui.configitems(b'keywordset')) |
|
530 | demoitems(b'keywordmaps', pycompat.iteritems(kwmaps)) |
|
530 | demoitems(b'keywordmaps', kwmaps.items()) | |
|
531 | 531 | keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n' |
|
532 | 532 | repo.wvfs.write(fn, keywords) |
|
533 | 533 | repo[None].add([fn]) |
@@ -713,7 +713,7 b' def copiespathcopies(orig, ctx1, ctx2, m' | |||
|
713 | 713 | copies = orig(ctx1, ctx2, match=match) |
|
714 | 714 | updated = {} |
|
715 | 715 | |
|
716 | for k, v in pycompat.iteritems(copies): |
|
716 | for k, v in copies.items(): | |
|
717 | 717 | updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v |
|
718 | 718 | |
|
719 | 719 | return updated |
@@ -62,7 +62,7 b' class gitlfspointer(dict):' | |||
|
62 | 62 | def validate(self): |
|
63 | 63 | """raise InvalidPointer on error. return self if there is no error""" |
|
64 | 64 | requiredcount = 0 |
|
65 | for k, v in pycompat.iteritems(self): |
|
65 | for k, v in self.items(): | |
|
66 | 66 | if k in self._requiredre: |
|
67 | 67 | if not self._requiredre[k].match(v): |
|
68 | 68 | raise InvalidPointer( |
@@ -24,7 +24,6 b' from mercurial import (' | |||
|
24 | 24 | exchange, |
|
25 | 25 | exthelper, |
|
26 | 26 | localrepo, |
|
27 | pycompat, | |
|
28 | 27 | revlog, |
|
29 | 28 | scmutil, |
|
30 | 29 | util, |
@@ -142,7 +141,7 b' def writetostore(self, text):' | |||
|
142 | 141 | |
|
143 | 142 | # translate hg filelog metadata to lfs metadata with "x-hg-" prefix |
|
144 | 143 | if hgmeta is not None: |
|
145 | for k, v in pycompat.iteritems(hgmeta): |
|
144 | for k, v in hgmeta.items(): | |
|
146 | 145 | metadata[b'x-hg-%s' % k] = v |
|
147 | 146 | |
|
148 | 147 | rawtext = metadata.serialize() |
@@ -2024,7 +2024,7 b' class queue(object):' | |||
|
2024 | 2024 | # we can't copy a file created by the patch itself |
|
2025 | 2025 | if dst in copies: |
|
2026 | 2026 | del copies[dst] |
|
2027 | for src, dsts in pycompat.iteritems(copies): |
|
2027 | for src, dsts in copies.items(): | |
|
2028 | 2028 | for dst in dsts: |
|
2029 | 2029 | repo.dirstate.copy(src, dst) |
|
2030 | 2030 | else: |
@@ -4287,7 +4287,7 b' def extsetup(ui):' | |||
|
4287 | 4287 | entry[1].extend(mqopt) |
|
4288 | 4288 | |
|
4289 | 4289 | def dotable(cmdtable): |
|
4290 | for cmd, entry in pycompat.iteritems(cmdtable): |
|
4290 | for cmd, entry in cmdtable.items(): | |
|
4291 | 4291 | cmd = cmdutil.parsealiases(cmd)[0] |
|
4292 | 4292 | func = entry[0] |
|
4293 | 4293 | if func.norepo: |
@@ -2279,7 +2279,7 b' def phabupdate(ui, repo, *specs, **opts)' | |||
|
2279 | 2279 | drevmap = getdrevmap(repo, logcmdutil.revrange(repo, [revs])) |
|
2280 | 2280 | specs = [] |
|
2281 | 2281 | unknown = [] |
|
2282 | for r, d in pycompat.iteritems(drevmap): |
|
2282 | for r, d in drevmap.items(): | |
|
2283 | 2283 | if d is None: |
|
2284 | 2284 | unknown.append(repo[r]) |
|
2285 | 2285 | else: |
@@ -2364,7 +2364,7 b' def phabstatusshowview(ui, repo, display' | |||
|
2364 | 2364 | revs = repo.revs('sort(_underway(), topo)') |
|
2365 | 2365 | drevmap = getdrevmap(repo, revs) |
|
2366 | 2366 | unknownrevs, drevids, revsbydrevid = [], set(), {} |
|
2367 | for rev, drevid in pycompat.iteritems(drevmap): |
|
2367 | for rev, drevid in drevmap.items(): | |
|
2368 | 2368 | if drevid is not None: |
|
2369 | 2369 | drevids.add(drevid) |
|
2370 | 2370 | revsbydrevid.setdefault(drevid, set()).add(rev) |
@@ -243,7 +243,7 b' class rebaseruntime(object):' | |||
|
243 | 243 | f.write(b'%d\n' % int(self.keepbranchesf)) |
|
244 | 244 | f.write(b'%s\n' % (self.activebookmark or b'')) |
|
245 | 245 | destmap = self.destmap |
|
246 | for d, v in pycompat.iteritems(self.state): |
|
246 | for d, v in self.state.items(): | |
|
247 | 247 | oldrev = repo[d].hex() |
|
248 | 248 | if v >= 0: |
|
249 | 249 | newrev = repo[v].hex() |
@@ -505,7 +505,7 b' class rebaseruntime(object):' | |||
|
505 | 505 | # commits. |
|
506 | 506 | self.storestatus(tr) |
|
507 | 507 | |
|
508 | cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo] |
|
508 | cands = [k for k, v in self.state.items() if v == revtodo] | |
|
509 | 509 | p = repo.ui.makeprogress( |
|
510 | 510 | _(b"rebasing"), unit=_(b'changesets'), total=len(cands) |
|
511 | 511 | ) |
@@ -1336,7 +1336,7 b' def _definedestmap(ui, repo, inmemory, d' | |||
|
1336 | 1336 | # emulate the old behavior, showing "nothing to rebase" (a better |
|
1337 | 1337 | # behavior may be abort with "cannot find branching point" error) |
|
1338 | 1338 | bpbase.clear() |
|
1339 | for bp, bs in pycompat.iteritems(bpbase): # calculate roots |
|
1339 | for bp, bs in bpbase.items(): # calculate roots | |
|
1340 | 1340 | roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs)) |
|
1341 | 1341 | |
|
1342 | 1342 | rebaseset = repo.revs(b'%ld::', roots) |
@@ -2103,7 +2103,7 b' def clearrebased(' | |||
|
2103 | 2103 | fl = fm.formatlist |
|
2104 | 2104 | fd = fm.formatdict |
|
2105 | 2105 | changes = {} |
|
2106 | for oldns, newn in pycompat.iteritems(replacements): |
|
2106 | for oldns, newn in replacements.items(): | |
|
2107 | 2107 | for oldn in oldns: |
|
2108 | 2108 | changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node') |
|
2109 | 2109 | nodechanges = fd(changes, key=b"oldnode", value=b"newnodes") |
@@ -177,7 +177,7 b' class releasenotessections(object):' | |||
|
177 | 177 | custom_sections = getcustomadmonitions(repo) |
|
178 | 178 | if custom_sections: |
|
179 | 179 | sections.update(custom_sections) |
|
180 | self._sections = list(pycompat.iteritems(sections)) |
|
180 | self._sections = list(sections.items()) | |
|
181 | 181 | else: |
|
182 | 182 | self._sections = list(DEFAULT_SECTIONS) |
|
183 | 183 |
@@ -146,7 +146,7 b' class basestore(object):' | |||
|
146 | 146 | |
|
147 | 147 | filenamemap = self._resolvefilenames(existing.keys()) |
|
148 | 148 | |
|
149 | for filename, sha in pycompat.iteritems(filenamemap): |
|
149 | for filename, sha in filenamemap.items(): | |
|
150 | 150 | yield (filename, existing[sha]) |
|
151 | 151 | |
|
152 | 152 | def _resolvefilenames(self, hashes): |
@@ -453,7 +453,7 b' class mutabledatapack(basepack.mutableba' | |||
|
453 | 453 | |
|
454 | 454 | def createindex(self, nodelocations, indexoffset): |
|
455 | 455 | entries = sorted( |
|
456 | (n, db, o, s) for n, (db, o, s) in pycompat.iteritems(self.entries) |
|
456 | (n, db, o, s) for n, (db, o, s) in self.entries.items() | |
|
457 | 457 | ) |
|
458 | 458 | |
|
459 | 459 | rawindex = b'' |
@@ -519,7 +519,7 b' class mutablehistorypack(basepack.mutabl' | |||
|
519 | 519 | |
|
520 | 520 | files = ( |
|
521 | 521 | (hashutil.sha1(filename).digest(), filename, offset, size) |
|
522 | for filename, (offset, size) in pycompat.iteritems(self.files) |
|
522 | for filename, (offset, size) in self.files.items() | |
|
523 | 523 | ) |
|
524 | 524 | files = sorted(files) |
|
525 | 525 | |
@@ -555,7 +555,7 b' class mutablehistorypack(basepack.mutabl' | |||
|
555 | 555 | ) |
|
556 | 556 | nodeindexoffset += constants.FILENAMESIZE + len(filename) |
|
557 | 557 | |
|
558 | for node, location in sorted(pycompat.iteritems(nodelocations)): |
|
558 | for node, location in sorted(nodelocations.items()): | |
|
559 | 559 | nodeindexentries.append( |
|
560 | 560 | struct.pack(nodeindexformat, node, location) |
|
561 | 561 | ) |
@@ -15,7 +15,6 b' from mercurial import (' | |||
|
15 | 15 | ancestor, |
|
16 | 16 | error, |
|
17 | 17 | mdiff, |
|
18 | pycompat, | |
|
19 | 18 | revlog, |
|
20 | 19 | ) |
|
21 | 20 | from mercurial.utils import storageutil |
@@ -423,7 +422,7 b' class remotefilelog(object):' | |||
|
423 | 422 | return self.repo.nullid |
|
424 | 423 | |
|
425 | 424 | revmap, parentfunc = self._buildrevgraph(a, b) |
|
426 | nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)} |
|
425 | nodemap = {v: k for (k, v) in revmap.items()} | |
|
427 | 426 | |
|
428 | 427 | ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b]) |
|
429 | 428 | if ancs: |
@@ -438,7 +437,7 b' class remotefilelog(object):' | |||
|
438 | 437 | return self.repo.nullid |
|
439 | 438 | |
|
440 | 439 | revmap, parentfunc = self._buildrevgraph(a, b) |
|
441 | nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)} |
|
440 | nodemap = {v: k for (k, v) in revmap.items()} | |
|
442 | 441 | |
|
443 | 442 | ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b]) |
|
444 | 443 | return map(nodemap.__getitem__, ancs) |
@@ -454,7 +453,7 b' class remotefilelog(object):' | |||
|
454 | 453 | parentsmap = collections.defaultdict(list) |
|
455 | 454 | allparents = set() |
|
456 | 455 | for mapping in (amap, bmap): |
|
457 | for node, pdata in pycompat.iteritems(mapping): |
|
456 | for node, pdata in mapping.items(): | |
|
458 | 457 | parents = parentsmap[node] |
|
459 | 458 | p1, p2, linknode, copyfrom = pdata |
|
460 | 459 | # Don't follow renames (copyfrom). |
@@ -21,7 +21,6 b' from mercurial import (' | |||
|
21 | 21 | error, |
|
22 | 22 | extensions, |
|
23 | 23 | match, |
|
24 | pycompat, | |
|
25 | 24 | scmutil, |
|
26 | 25 | store, |
|
27 | 26 | streamclone, |
@@ -416,7 +415,7 b' def gcserver(ui, repo):' | |||
|
416 | 415 | cachepath = repo.vfs.join(b"remotefilelogcache") |
|
417 | 416 | for head in heads: |
|
418 | 417 | mf = repo[head].manifest() |
|
419 | for filename, filenode in pycompat.iteritems(mf): |
|
418 | for filename, filenode in mf.items(): | |
|
420 | 419 | filecachepath = os.path.join(cachepath, filename, hex(filenode)) |
|
421 | 420 | neededfiles.add(filecachepath) |
|
422 | 421 |
@@ -487,12 +487,12 b' def keepset(repo, keyfn, lastkeepkeys=No' | |||
|
487 | 487 | if type(m) is dict: |
|
488 | 488 | # m is a result of diff of two manifests and is a dictionary that |
|
489 | 489 | # maps filename to ((newnode, newflag), (oldnode, oldflag)) tuple |
|
490 | for filename, diff in pycompat.iteritems(m): |
|
490 | for filename, diff in m.items(): | |
|
491 | 491 | if diff[0][0] is not None: |
|
492 | 492 | keepkeys.add(keyfn(filename, diff[0][0])) |
|
493 | 493 | else: |
|
494 | 494 | # m is a manifest object |
|
495 | for filename, filenode in pycompat.iteritems(m): |
|
495 | for filename, filenode in m.items(): | |
|
496 | 496 | keepkeys.add(keyfn(filename, filenode)) |
|
497 | 497 | |
|
498 | 498 | return keepkeys |
@@ -602,7 +602,7 b' class repacker(object):' | |||
|
602 | 602 | repackprogress = ui.makeprogress( |
|
603 | 603 | _(b"repacking data"), unit=self.unit, total=len(byfile) |
|
604 | 604 | ) |
|
605 | for filename, entries in sorted(pycompat.iteritems(byfile)): |
|
605 | for filename, entries in sorted(byfile.items()): | |
|
606 | 606 | repackprogress.update(count) |
|
607 | 607 | |
|
608 | 608 | ancestors = {} |
@@ -756,7 +756,7 b' class repacker(object):' | |||
|
756 | 756 | progress = ui.makeprogress( |
|
757 | 757 | _(b"repacking history"), unit=self.unit, total=len(byfile) |
|
758 | 758 | ) |
|
759 | for filename, entries in sorted(pycompat.iteritems(byfile)): |
|
759 | for filename, entries in sorted(byfile.items()): | |
|
760 | 760 | ancestors = {} |
|
761 | 761 | nodes = list(node for node in entries) |
|
762 | 762 |
@@ -14,7 +14,6 b' from mercurial import (' | |||
|
14 | 14 | error, |
|
15 | 15 | localrepo, |
|
16 | 16 | match, |
|
17 | pycompat, | |
|
18 | 17 | scmutil, |
|
19 | 18 | sparse, |
|
20 | 19 | util, |
@@ -268,7 +267,7 b' def wraprepo(repo):' | |||
|
268 | 267 | mfrevlog = mfl.getstorage(b'') |
|
269 | 268 | if base is not None: |
|
270 | 269 | mfdict = mfl[repo[base].manifestnode()].read() |
|
271 | skip = set(pycompat.iteritems(mfdict)) |
|
270 | skip = set(mfdict.items()) | |
|
272 | 271 | else: |
|
273 | 272 | skip = set() |
|
274 | 273 | |
@@ -298,7 +297,7 b' def wraprepo(repo):' | |||
|
298 | 297 | else: |
|
299 | 298 | mfdict = mfl[mfnode].read() |
|
300 | 299 | |
|
301 | diff = pycompat.iteritems(mfdict) |
|
300 | diff = mfdict.items() | |
|
302 | 301 | if pats: |
|
303 | 302 | diff = (pf for pf in diff if m(pf[0])) |
|
304 | 303 | if sparsematch: |
@@ -102,7 +102,7 b' def sumdicts(*dicts):' | |||
|
102 | 102 | """ |
|
103 | 103 | result = collections.defaultdict(lambda: 0) |
|
104 | 104 | for dict in dicts: |
|
105 | for k, v in pycompat.iteritems(dict): |
|
105 | for k, v in dict.items(): | |
|
106 | 106 | result[k] += v |
|
107 | 107 | return result |
|
108 | 108 | |
@@ -110,7 +110,7 b' def sumdicts(*dicts):' | |||
|
110 | 110 | def prefixkeys(dict, prefix): |
|
111 | 111 | """Returns ``dict`` with ``prefix`` prepended to all its keys.""" |
|
112 | 112 | result = {} |
|
113 | for k, v in pycompat.iteritems(dict): |
|
113 | for k, v in dict.items(): | |
|
114 | 114 | result[prefix + k] = v |
|
115 | 115 | return result |
|
116 | 116 | |
@@ -208,7 +208,7 b' def parsepackmeta(metabuf):' | |||
|
208 | 208 | integers. |
|
209 | 209 | """ |
|
210 | 210 | metadict = _parsepackmeta(metabuf) |
|
211 | for k, v in pycompat.iteritems(metadict): |
|
211 | for k, v in metadict.items(): | |
|
212 | 212 | if k in _metaitemtypes and int in _metaitemtypes[k]: |
|
213 | 213 | metadict[k] = bin2int(v) |
|
214 | 214 | return metadict |
@@ -170,7 +170,7 b' class lazyremotenamedict(mutablemapping)' | |||
|
170 | 170 | if not self.loaded: |
|
171 | 171 | self._load() |
|
172 | 172 | |
|
173 | for k, vtup in pycompat.iteritems(self.potentialentries): |
|
173 | for k, vtup in self.potentialentries.items(): | |
|
174 | 174 | yield (k, [bin(vtup[0])]) |
|
175 | 175 | |
|
176 | 176 | items = iteritems |
@@ -207,7 +207,7 b' class remotenames(object):' | |||
|
207 | 207 | if not self._nodetobmarks: |
|
208 | 208 | bmarktonodes = self.bmarktonodes() |
|
209 | 209 | self._nodetobmarks = {} |
|
210 | for name, node in pycompat.iteritems(bmarktonodes): |
|
210 | for name, node in bmarktonodes.items(): | |
|
211 | 211 | self._nodetobmarks.setdefault(node[0], []).append(name) |
|
212 | 212 | return self._nodetobmarks |
|
213 | 213 | |
@@ -218,7 +218,7 b' class remotenames(object):' | |||
|
218 | 218 | if not self._nodetobranch: |
|
219 | 219 | branchtonodes = self.branchtonodes() |
|
220 | 220 | self._nodetobranch = {} |
|
221 | for name, nodes in pycompat.iteritems(branchtonodes): |
|
221 | for name, nodes in branchtonodes.items(): | |
|
222 | 222 | for node in nodes: |
|
223 | 223 | self._nodetobranch.setdefault(node, []).append(name) |
|
224 | 224 | return self._nodetobranch |
@@ -228,7 +228,7 b' class remotenames(object):' | |||
|
228 | 228 | marktonodes = self.bmarktonodes() |
|
229 | 229 | self._hoisttonodes = {} |
|
230 | 230 | hoist += b'/' |
|
231 | for name, node in pycompat.iteritems(marktonodes): |
|
231 | for name, node in marktonodes.items(): | |
|
232 | 232 | if name.startswith(hoist): |
|
233 | 233 | name = name[len(hoist) :] |
|
234 | 234 | self._hoisttonodes[name] = node |
@@ -239,7 +239,7 b' class remotenames(object):' | |||
|
239 | 239 | marktonodes = self.bmarktonodes() |
|
240 | 240 | self._nodetohoists = {} |
|
241 | 241 | hoist += b'/' |
|
242 | for name, node in pycompat.iteritems(marktonodes): |
|
242 | for name, node in marktonodes.items(): | |
|
243 | 243 | if name.startswith(hoist): |
|
244 | 244 | name = name[len(hoist) :] |
|
245 | 245 | self._nodetohoists.setdefault(node[0], []).append(name) |
@@ -80,9 +80,7 b' def _commitfiltered(' | |||
|
80 | 80 | files = initialfiles - exclude |
|
81 | 81 | # Filter copies |
|
82 | 82 | copied = copiesmod.pathcopies(base, ctx) |
|
83 | copied = { | |
|
84 | dst: src for dst, src in pycompat.iteritems(copied) if dst in files | |
|
85 | } | |
|
83 | copied = {dst: src for dst, src in copied.items() if dst in files} | |
|
86 | 84 | |
|
87 | 85 | def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()): |
|
88 | 86 | if path not in contentctx: |
@@ -212,7 +212,7 b' def forbidcr(ui, repo, hooktype, node, *' | |||
|
212 | 212 | def reposetup(ui, repo): |
|
213 | 213 | if not repo.local(): |
|
214 | 214 | return |
|
215 | for name, fn in pycompat.iteritems(_filters): |
|
215 | for name, fn in _filters.items(): | |
|
216 | 216 | repo.adddatafilter(name, fn) |
|
217 | 217 | |
|
218 | 218 |
@@ -75,7 +75,7 b' exts = {' | |||
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | def guesskind(dest): |
|
78 | for kind, extensions in pycompat.iteritems(exts): |
|
78 | for kind, extensions in exts.items(): | |
|
79 | 79 | if any(dest.endswith(ext) for ext in extensions): |
|
80 | 80 | return kind |
|
81 | 81 | return None |
@@ -137,7 +137,7 b' class bmstore(object):' | |||
|
137 | 137 | return iter(self._refmap) |
|
138 | 138 | |
|
139 | 139 | def iteritems(self): |
|
140 | return pycompat.iteritems(self._refmap) |
|
140 | return self._refmap.items() | |
|
141 | 141 | |
|
142 | 142 | def items(self): |
|
143 | 143 | return self._refmap.items() |
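Note that bmstore above keeps a separate iteritems() method alongside items() (and dirstate later aliases iteritems = items), so external code written against the old py2-era spelling keeps working. A hedged sketch of that compatibility pattern, with a toy class standing in for the real ones:

    class refstore:  # illustrative stand-in, not the real bmstore
        def __init__(self, refmap):
            self._refmap = refmap

        def items(self):
            return self._refmap.items()

        iteritems = items  # old spelling kept as an alias for callers

    s = refstore({b'mark': b'\x00' * 20})
    assert list(s.iteritems()) == list(s.items())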
@@ -250,7 +250,7 b' class bmstore(object):' | |||
|
250 | 250 | self._aclean = True |
|
251 | 251 | |
|
252 | 252 | def _write(self, fp): |
|
253 | for name, node in sorted(pycompat.iteritems(self._refmap)): |
|
253 | for name, node in sorted(self._refmap.items()): | |
|
254 | 254 | fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name))) |
|
255 | 255 | self._clean = True |
|
256 | 256 | self._repo.invalidatevolatilesets() |
@@ -418,7 +418,7 b' def headsforactive(repo):' | |||
|
418 | 418 | ) |
|
419 | 419 | name = repo._activebookmark.split(b'@', 1)[0] |
|
420 | 420 | heads = [] |
|
421 | for mark, n in pycompat.iteritems(repo._bookmarks): |
|
421 | for mark, n in repo._bookmarks.items(): | |
|
422 | 422 | if mark.split(b'@', 1)[0] == name: |
|
423 | 423 | heads.append(n) |
|
424 | 424 | return heads |
@@ -476,7 +476,7 b' def listbinbookmarks(repo):' | |||
|
476 | 476 | marks = getattr(repo, '_bookmarks', {}) |
|
477 | 477 | |
|
478 | 478 | hasnode = repo.changelog.hasnode |
|
479 | for k, v in pycompat.iteritems(marks): |
|
479 | for k, v in marks.items(): | |
|
480 | 480 | # don't expose local divergent bookmarks |
|
481 | 481 | if hasnode(v) and not isdivergent(k): |
|
482 | 482 | yield k, v |
@@ -687,7 +687,7 b' def mirroring_remote(ui, repo, remotemar' | |||
|
687 | 687 | remotemarks""" |
|
688 | 688 | changed = [] |
|
689 | 689 | localmarks = repo._bookmarks |
|
690 | for (b, id) in pycompat.iteritems(remotemarks): |
|
690 | for (b, id) in remotemarks.items(): | |
|
691 | 691 | if id != localmarks.get(b, None) and id in repo: |
|
692 | 692 | changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b)) |
|
693 | 693 | for b in localmarks: |
@@ -1074,7 +1074,7 b' def _printbookmarks(ui, repo, fm, bmarks' | |||
|
1074 | 1074 | hexfn = fm.hexfunc |
|
1075 | 1075 | if len(bmarks) == 0 and fm.isplain(): |
|
1076 | 1076 | ui.status(_(b"no bookmarks set\n")) |
|
1077 | for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)): |
|
1077 | for bmark, (n, prefix, label) in sorted(bmarks.items()): | |
|
1078 | 1078 | fm.startitem() |
|
1079 | 1079 | fm.context(repo=repo) |
|
1080 | 1080 | if not ui.quiet: |
@@ -270,7 +270,7 b' class branchcache(object):' | |||
|
270 | 270 | return key in self._entries |
|
271 | 271 | |
|
272 | 272 | def iteritems(self): |
|
273 | for k, v in pycompat.iteritems(self._entries): |
|
273 | for k, v in self._entries.items(): | |
|
274 | 274 | self._verifybranch(k) |
|
275 | 275 | yield k, v |
|
276 | 276 | |
@@ -400,7 +400,7 b' class branchcache(object):' | |||
|
400 | 400 | return heads |
|
401 | 401 | |
|
402 | 402 | def iterbranches(self): |
|
403 | for bn, heads in pycompat.iteritems(self): |
|
403 | for bn, heads in self.items(): | |
|
404 | 404 | yield (bn, heads) + self._branchtip(heads) |
|
405 | 405 | |
|
406 | 406 | def iterheads(self): |
@@ -434,7 +434,7 b' class branchcache(object):' | |||
|
434 | 434 | cachekey.append(hex(self.filteredhash)) |
|
435 | 435 | f.write(b" ".join(cachekey) + b'\n') |
|
436 | 436 | nodecount = 0 |
|
437 | for label, nodes in sorted(pycompat.iteritems(self._entries)): |
|
437 | for label, nodes in sorted(self._entries.items()): | |
|
438 | 438 | label = encoding.fromlocal(label) |
|
439 | 439 | for node in nodes: |
|
440 | 440 | nodecount += 1 |
@@ -490,7 +490,7 b' class branchcache(object):' | |||
|
490 | 490 | # Faster than using ctx.obsolete() |
|
491 | 491 | obsrevs = obsolete.getrevs(repo, b'obsolete') |
|
492 | 492 | |
|
493 | for branch, newheadrevs in pycompat.iteritems(newbranches): |
|
493 | for branch, newheadrevs in newbranches.items(): | |
|
494 | 494 | # For every branch, compute the new branchheads. |
|
495 | 495 | # A branchhead is a revision such that no descendant is on |
|
496 | 496 | # the same branch. |
@@ -2239,7 +2239,7 b' def handlecheckphases(op, inpart):' | |||
|
2239 | 2239 | b'remote repository changed while pushing - please try again ' |
|
2240 | 2240 | b'(%s is %s expected %s)' |
|
2241 | 2241 | ) |
|
2242 | for expectedphase, nodes in pycompat.iteritems(phasetonodes): |
|
2242 | for expectedphase, nodes in phasetonodes.items(): | |
|
2243 | 2243 | for n in nodes: |
|
2244 | 2244 | actualphase = phasecache.phase(unfi, cl.rev(n)) |
|
2245 | 2245 | if actualphase != expectedphase: |
@@ -424,7 +424,7 b' class cg1unpacker(object):' | |||
|
424 | 424 | mfnode = cl.changelogrevision(cset).manifest |
|
425 | 425 | mfest = ml[mfnode].readdelta() |
|
426 | 426 | # store file nodes we must see |
|
427 | for f, n in pycompat.iteritems(mfest): |
|
427 | for f, n in mfest.items(): | |
|
428 | 428 | needfiles.setdefault(f, set()).add(n) |
|
429 | 429 | |
|
430 | 430 | on_filelog_rev = None |
@@ -1966,7 +1966,7 b' def _addchangegroupfiles(' | |||
|
1966 | 1966 | del needfiles[f] |
|
1967 | 1967 | progress.complete() |
|
1968 | 1968 | |
|
1969 | for f, needs in pycompat.iteritems(needfiles): |
|
1969 | for f, needs in needfiles.items(): | |
|
1970 | 1970 | fl = repo.file(f) |
|
1971 | 1971 | for n in needs: |
|
1972 | 1972 | try: |
@@ -134,7 +134,7 b' def _confighash(ui):' | |||
|
134 | 134 | ignored = set() |
|
135 | 135 | envitems = [ |
|
136 | 136 | (k, v) |
|
137 | for k, v in pycompat.iteritems(encoding.environ) |
|
137 | for k, v in encoding.environ.items() | |
|
138 | 138 | if _envre.match(k) and k not in ignored |
|
139 | 139 | ] |
|
140 | 140 | envhash = _hashlist(sorted(envitems)) |
@@ -320,7 +320,7 b' class channeledsystem(object):' | |||
|
320 | 320 | |
|
321 | 321 | def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None): |
|
322 | 322 | args = [type, cmd, util.abspath(cwd or b'.')] |
|
323 | args.extend(b'%s=%s' % (k, v) for k, v in pycompat.iteritems(environ)) |
|
323 | args.extend(b'%s=%s' % (k, v) for k, v in environ.items()) | |
|
324 | 324 | data = b'\0'.join(args) |
|
325 | 325 | self.out.write(struct.pack(b'>cI', self.channel, len(data))) |
|
326 | 326 | self.out.write(data) |
@@ -626,7 +626,7 b' def dorecord(' | |||
|
626 | 626 | # 5. finally restore backed-up files |
|
627 | 627 | try: |
|
628 | 628 | dirstate = repo.dirstate |
|
629 | for realname, tmpname in pycompat.iteritems(backups): |
|
629 | for realname, tmpname in backups.items(): | |
|
630 | 630 | ui.debug(b'restoring %r to %r\n' % (tmpname, realname)) |
|
631 | 631 | |
|
632 | 632 | if dirstate.get_entry(realname).maybe_clean: |
@@ -2469,7 +2469,7 b' def copy(ui, repo, *pats, **opts):' | |||
|
2469 | 2469 | ) |
|
2470 | 2470 | def debugcommands(ui, cmd=b'', *args): |
|
2471 | 2471 | """list all available commands and options""" |
|
2472 | for cmd, vals in sorted(pycompat.iteritems(table)): |
|
2472 | for cmd, vals in sorted(table.items()): | |
|
2473 | 2473 | cmd = cmd.split(b'|')[0] |
|
2474 | 2474 | opts = b', '.join([i[1] for i in vals[1]]) |
|
2475 | 2475 | ui.write(b'%s: %s\n' % (cmd, opts)) |
@@ -7089,7 +7089,7 b' def summary(ui, repo, **opts):' | |||
|
7089 | 7089 | |
|
7090 | 7090 | c = repo.dirstate.copies() |
|
7091 | 7091 | copied, renamed = [], [] |
|
7092 | for d, s in pycompat.iteritems(c): |
|
7092 | for d, s in c.items(): | |
|
7093 | 7093 | if s in status.removed: |
|
7094 | 7094 | status.removed.remove(s) |
|
7095 | 7095 | renamed.append(d) |
@@ -123,7 +123,7 b' class basectx(object):' | |||
|
123 | 123 | deleted, unknown, ignored = s.deleted, s.unknown, s.ignored |
|
124 | 124 | deletedset = set(deleted) |
|
125 | 125 | d = mf1.diff(mf2, match=match, clean=listclean) |
|
126 | for fn, value in pycompat.iteritems(d): |
|
126 | for fn, value in d.items(): | |
|
127 | 127 | if fn in deletedset: |
|
128 | 128 | continue |
|
129 | 129 | if value is None: |
@@ -17,7 +17,6 b' from . import (' | |||
|
17 | 17 | match as matchmod, |
|
18 | 18 | pathutil, |
|
19 | 19 | policy, |
|
20 | pycompat, | |
|
21 | 20 | util, |
|
22 | 21 | ) |
|
23 | 22 | |
@@ -68,7 +67,7 b' def _filter(src, dst, t):' | |||
|
68 | 67 | def _chain(prefix, suffix): |
|
69 | 68 | """chain two sets of copies 'prefix' and 'suffix'""" |
|
70 | 69 | result = prefix.copy() |
|
71 | for key, value in pycompat.iteritems(suffix): |
|
70 | for key, value in suffix.items(): | |
|
72 | 71 | result[key] = prefix.get(value, value) |
|
73 | 72 | return result |
|
74 | 73 | |
@@ -408,7 +407,7 b' def _combine_changeset_copies(' | |||
|
408 | 407 | |
|
409 | 408 | if childcopies: |
|
410 | 409 | newcopies = copies.copy() |
|
411 | for dest, source in pycompat.iteritems(childcopies): |
|
410 | for dest, source in childcopies.items(): | |
|
412 | 411 | prev = copies.get(source) |
|
413 | 412 | if prev is not None and prev[1] is not None: |
|
414 | 413 | source = prev[1] |
@@ -623,7 +622,7 b' def _combine_changeset_copies_extra(' | |||
|
623 | 622 | newcopies = copies |
|
624 | 623 | if childcopies: |
|
625 | 624 | newcopies = copies.copy() |
|
626 | for dest, source in pycompat.iteritems(childcopies): |
|
625 | for dest, source in childcopies.items(): | |
|
627 | 626 | prev = copies.get(source) |
|
628 | 627 | if prev is not None and prev[1] is not None: |
|
629 | 628 | source = prev[1] |
@@ -721,7 +720,7 b' def _reverse_renames(copies, dst, match)' | |||
|
721 | 720 | # can still exist (e.g. hg cp a b; hg mv a c). In those cases we |
|
722 | 721 | # arbitrarily pick one of the renames. |
|
723 | 722 | r = {} |
|
724 | for k, v in sorted(pycompat.iteritems(copies)): |
|
723 | for k, v in sorted(copies.items()): | |
|
725 | 724 | if match and not match(v): |
|
726 | 725 | continue |
|
727 | 726 | # remove copies |
@@ -1080,7 +1079,7 b' def _dir_renames(repo, ctx, copy, fullco' | |||
|
1080 | 1079 | |
|
1081 | 1080 | # examine each file copy for a potential directory move, which is |
|
1082 | 1081 | # when all the files in a directory are moved to a new directory |
|
1083 | for dst, src in pycompat.iteritems(fullcopy): |
|
1082 | for dst, src in fullcopy.items(): | |
|
1084 | 1083 | dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst) |
|
1085 | 1084 | if dsrc in invalid: |
|
1086 | 1085 | # already seen to be uninteresting |
@@ -1103,7 +1102,7 b' def _dir_renames(repo, ctx, copy, fullco' | |||
|
1103 | 1102 | if not dirmove: |
|
1104 | 1103 | return {}, {} |
|
1105 | 1104 | |
|
1106 | dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)} |
|
1105 | dirmove = {k + b"/": v + b"/" for k, v in dirmove.items()} | |
|
1107 | 1106 | |
|
1108 | 1107 | for d in dirmove: |
|
1109 | 1108 | repo.ui.debug( |
@@ -1186,7 +1185,7 b' def _heuristicscopytracing(repo, c1, c2,' | |||
|
1186 | 1185 | |
|
1187 | 1186 | copies2 = {} |
|
1188 | 1187 | cp = _forwardcopies(base, c2) |
|
1189 | for dst, src in pycompat.iteritems(cp): |
|
1188 | for dst, src in cp.items(): | |
|
1190 | 1189 | if src in m1: |
|
1191 | 1190 | copies2[dst] = src |
|
1192 | 1191 | |
@@ -1304,5 +1303,5 b' def graftcopies(wctx, ctx, base):' | |||
|
1304 | 1303 | for dest, __ in list(new_copies.items()): |
|
1305 | 1304 | if dest in parent: |
|
1306 | 1305 | del new_copies[dest] |
|
1307 | for dst, src in pycompat.iteritems(new_copies): |
|
1306 | for dst, src in new_copies.items(): | |
|
1308 | 1307 | wctx[dst].markcopied(src) |
@@ -492,7 +492,7 b' def debugcapabilities(ui, path, **opts):' | |||
|
492 | 492 | b2caps = bundle2.bundle2caps(peer) |
|
493 | 493 | if b2caps: |
|
494 | 494 | ui.writenoi18n(b'Bundle2 capabilities:\n') |
|
495 | for key, values in sorted(pycompat.iteritems(b2caps)): |
|
495 | for key, values in sorted(b2caps.items()): | |
|
496 | 496 | ui.write(b' %s\n' % key) |
|
497 | 497 | for v in values: |
|
498 | 498 | ui.write(b' %s\n' % v) |
@@ -2388,7 +2388,7 b' def debugmergestate(ui, repo, *args, **o' | |||
|
2388 | 2388 | if f in ms: |
|
2389 | 2389 | # If file is in mergestate, we have already processed it's extras |
|
2390 | 2390 | continue |
|
2391 | for k, v in pycompat.iteritems(d): |
|
2391 | for k, v in d.items(): | |
|
2392 | 2392 | fm_extras.startitem() |
|
2393 | 2393 | fm_extras.data(file=f) |
|
2394 | 2394 | fm_extras.data(key=k) |
@@ -2405,7 +2405,7 b' def debugnamecomplete(ui, repo, *args):' | |||
|
2405 | 2405 | names = set() |
|
2406 | 2406 | # since we previously only listed open branches, we will handle that |
|
2407 | 2407 | # specially (after this for loop) |
|
2408 | for name, ns in pycompat.iteritems(repo.names): |
|
2408 | for name, ns in repo.names.items(): | |
|
2409 | 2409 | if name != b'branches': |
|
2410 | 2410 | names.update(ns.listnames(repo)) |
|
2411 | 2411 | names.update( |
@@ -2699,7 +2699,7 b' def debugpathcomplete(ui, repo, *specs, ' | |||
|
2699 | 2699 | fullpaths = opts['full'] |
|
2700 | 2700 | files, dirs = set(), set() |
|
2701 | 2701 | adddir, addfile = dirs.add, files.add |
|
2702 | for f, st in pycompat.iteritems(dirstate): |
|
2702 | for f, st in dirstate.items(): | |
|
2703 | 2703 | if f.startswith(spec) and st.state in acceptable: |
|
2704 | 2704 | if fixpaths: |
|
2705 | 2705 | f = f.replace(b'/', pycompat.ossep) |
@@ -4270,7 +4270,7 b' def debugwireargs(ui, repopath, *vals, *' | |||
|
4270 | 4270 | for opt in cmdutil.remoteopts: |
|
4271 | 4271 | del opts[opt[1]] |
|
4272 | 4272 | args = {} |
|
4273 | for k, v in pycompat.iteritems(opts): |
|
4273 | for k, v in opts.items(): | |
|
4274 | 4274 | if v: |
|
4275 | 4275 | args[k] = v |
|
4276 | 4276 | args = pycompat.strkwargs(args) |
@@ -342,7 +342,7 b' class dirstate(object):' | |||
|
342 | 342 | return iter(sorted(self._map)) |
|
343 | 343 | |
|
344 | 344 | def items(self): |
|
345 | return pycompat.iteritems(self._map) |
|
345 | return self._map.items() | |
|
346 | 346 | |
|
347 | 347 | iteritems = items |
|
348 | 348 | |
@@ -770,9 +770,7 b' class dirstate(object):' | |||
|
770 | 770 | def _writedirstate(self, tr, st): |
|
771 | 771 | # notify callbacks about parents change |
|
772 | 772 | if self._origpl is not None and self._origpl != self._pl: |
|
773 | for c, callback in sorted( | |
|
774 | pycompat.iteritems(self._plchangecallbacks) | |
|
775 | ): | |
|
773 | for c, callback in sorted(self._plchangecallbacks.items()): | |
|
776 | 774 | callback(self, self._origpl, self._pl) |
|
777 | 775 | self._origpl = None |
|
778 | 776 | self._map.write(tr, st) |
@@ -935,7 +933,7 b' class dirstate(object):' | |||
|
935 | 933 | if match.isexact() and self._checkcase: |
|
936 | 934 | normed = {} |
|
937 | 935 | |
|
938 | for f, st in pycompat.iteritems(results): |
|
936 | for f, st in results.items(): | |
|
939 | 937 | if st is None: |
|
940 | 938 | continue |
|
941 | 939 | |
@@ -948,7 +946,7 b' class dirstate(object):' | |||
|
948 | 946 | |
|
949 | 947 | paths.add(f) |
|
950 | 948 | |
|
951 | for norm, paths in pycompat.iteritems(normed): |
|
949 | for norm, paths in normed.items(): | |
|
952 | 950 | if len(paths) > 1: |
|
953 | 951 | for path in paths: |
|
954 | 952 | folded = self._discoverpath( |
@@ -12,7 +12,6 b' from . import (' | |||
|
12 | 12 | error, |
|
13 | 13 | pathutil, |
|
14 | 14 | policy, |
|
15 | pycompat, | |
|
16 | 15 | txnutil, |
|
17 | 16 | util, |
|
18 | 17 | ) |
@@ -354,7 +353,7 b' class dirstatemap(_dirstatemapcommon):' | |||
|
354 | 353 | util.clearcachedproperty(self, b"dirfoldmap") |
|
355 | 354 | |
|
356 | 355 | def items(self): |
|
357 | return pycompat.iteritems(self._map) |
|
356 | return self._map.items() | |
|
358 | 357 | |
|
359 | 358 | # forward for python2,3 compat |
|
360 | 359 | iteritems = items |
@@ -378,7 +377,7 b' class dirstatemap(_dirstatemapcommon):' | |||
|
378 | 377 | self._dirtyparents = True |
|
379 | 378 | copies = {} |
|
380 | 379 | if fold_p2: |
|
381 | for f, s in pycompat.iteritems(self._map): |
|
380 | for f, s in self._map.items(): | |
|
382 | 381 | # Discard "merged" markers when moving away from a merge state |
|
383 | 382 | if s.p2_info: |
|
384 | 383 | source = self.copymap.pop(f, None) |
@@ -501,7 +500,7 b' class dirstatemap(_dirstatemapcommon):' | |||
|
501 | 500 | |
|
502 | 501 | f = {} |
|
503 | 502 | normcase = util.normcase |
|
504 | for name, s in pycompat.iteritems(self._map): |
|
503 | for name, s in self._map.items(): | |
|
505 | 504 | if not s.removed: |
|
506 | 505 | f[normcase(name)] = name |
|
507 | 506 | f[b'.'] = b'.' # prevents useless util.fspath() invocation |
@@ -237,7 +237,7 b' def _headssummary(pushop):' | |||
|
237 | 237 | |
|
238 | 238 | knownnode = cl.hasnode # do not use nodemap until it is filtered |
|
239 | 239 | # A. register remote heads of branches which are in outgoing set |
|
240 | for branch, heads in pycompat.iteritems(remotemap): |
|
240 | for branch, heads in remotemap.items(): | |
|
241 | 241 | # don't add head info about branches which we don't have locally |
|
242 | 242 | if branch not in branches: |
|
243 | 243 | continue |
@@ -261,14 +261,14 b' def _headssummary(pushop):' | |||
|
261 | 261 | repo, |
|
262 | 262 | ( |
|
263 | 263 | (branch, heads[1]) |
|
264 | for branch, heads in pycompat.iteritems(headssum) |
|
264 | for branch, heads in headssum.items() | |
|
265 | 265 | if heads[0] is not None |
|
266 | 266 | ), |
|
267 | 267 | ) |
|
268 | 268 | newmap.update(repo, (ctx.rev() for ctx in missingctx)) |
|
269 | for branch, newheads in pycompat.iteritems(newmap): |
|
269 | for branch, newheads in newmap.items(): | |
|
270 | 270 | headssum[branch][1][:] = newheads |
|
271 | for branch, items in pycompat.iteritems(headssum): |
|
271 | for branch, items in headssum.items(): | |
|
272 | 272 | for l in items: |
|
273 | 273 | if l is not None: |
|
274 | 274 | l.sort() |
@@ -379,9 +379,7 b' def checkheads(pushop):' | |||
|
379 | 379 | headssum = _oldheadssummary(repo, remoteheads, outgoing, inc) |
|
380 | 380 | pushop.pushbranchmap = headssum |
|
381 | 381 | newbranches = [ |
|
382 | branch | |
|
383 | for branch, heads in pycompat.iteritems(headssum) | |
|
384 | if heads[0] is None | |
|
382 | branch for branch, heads in headssum.items() if heads[0] is None | |
|
385 | 383 | ] |
|
386 | 384 | # 1. Check for new branches on the remote. |
|
387 | 385 | if newbranches and not newbranch: # new branch requires --new-branch |
@@ -572,7 +572,7 b' class cmdalias(object):' | |||
|
572 | 572 | |
|
573 | 573 | try: |
|
574 | 574 | aliases, entry = cmdutil.findcmd(self.name, cmdtable) |
|
575 | for alias, e in pycompat.iteritems(cmdtable): |
|
575 | for alias, e in cmdtable.items(): | |
|
576 | 576 | if e is entry: |
|
577 | 577 | self.cmd = alias |
|
578 | 578 | break |
@@ -805,7 +805,7 b' def _pushb2ctxcheckheads(pushop, bundler' | |||
|
805 | 805 | bundler.newpart(b'check:heads', data=iter(pushop.remoteheads)) |
|
806 | 806 | else: |
|
807 | 807 | affected = set() |
|
808 | for branch, heads in pycompat.iteritems(pushop.pushbranchmap): |
|
808 | for branch, heads in pushop.pushbranchmap.items(): | |
|
809 | 809 | remoteheads, newheads, unsyncedheads, discardedheads = heads |
|
810 | 810 | if remoteheads is not None: |
|
811 | 811 | remote = set(remoteheads) |
@@ -1116,7 +1116,7 b' def _getbundlesendvars(pushop, bundler):' | |||
|
1116 | 1116 | |
|
1117 | 1117 | part = bundler.newpart(b'pushvars') |
|
1118 | 1118 | |
|
1119 | for key, value in pycompat.iteritems(shellvars): |
|
1119 | for key, value in shellvars.items(): | |
|
1120 | 1120 | part.addparam(key, value, mandatory=False) |
|
1121 | 1121 | |
|
1122 | 1122 |
@@ -73,7 +73,7 b' def find(name):' | |||
|
73 | 73 | try: |
|
74 | 74 | mod = _extensions[name] |
|
75 | 75 | except KeyError: |
|
76 | for k, v in pycompat.iteritems(_extensions): |
|
76 | for k, v in _extensions.items(): | |
|
77 | 77 | if k.endswith(b'.' + name) or k.endswith(b'/' + name): |
|
78 | 78 | mod = v |
|
79 | 79 | break |
@@ -170,7 +170,7 b' def _rejectunicode(name, xs):' | |||
|
170 | 170 | |
|
171 | 171 | def _validatecmdtable(ui, cmdtable): |
|
172 | 172 | """Check if extension commands have required attributes""" |
|
173 | for c, e in pycompat.iteritems(cmdtable): |
|
173 | for c, e in cmdtable.items(): | |
|
174 | 174 | f = e[0] |
|
175 | 175 | missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)] |
|
176 | 176 | if not missing: |
@@ -578,7 +578,7 b' def wrapcommand(table, command, wrapper,' | |||
|
578 | 578 | ''' |
|
579 | 579 | assert callable(wrapper) |
|
580 | 580 | aliases, entry = cmdutil.findcmd(command, table) |
|
581 | for alias, e in pycompat.iteritems(table): |
|
581 | for alias, e in table.items(): | |
|
582 | 582 | if e is entry: |
|
583 | 583 | key = alias |
|
584 | 584 | break |
@@ -755,7 +755,7 b' def _disabledpaths():' | |||
|
755 | 755 | if name in exts or name in _order or name == b'__init__': |
|
756 | 756 | continue |
|
757 | 757 | exts[name] = path |
|
758 | for name, path in pycompat.iteritems(_disabledextensions): |
|
758 | for name, path in _disabledextensions.items(): | |
|
759 | 759 | # If no path was provided for a disabled extension (e.g. "color=!"), |
|
760 | 760 | # don't replace the path we already found by the scan above. |
|
761 | 761 | if path: |
@@ -817,7 +817,7 b' def disabled():' | |||
|
817 | 817 | |
|
818 | 818 | return { |
|
819 | 819 | name: gettext(desc) |
|
820 | for name, desc in pycompat.iteritems(__index__.docs) |
|
820 | for name, desc in __index__.docs.items() | |
|
821 | 821 | if name not in _order |
|
822 | 822 | } |
|
823 | 823 | except (ImportError, AttributeError): |
@@ -828,7 +828,7 b' def disabled():' | |||
|
828 | 828 | return {} |
|
829 | 829 | |
|
830 | 830 | exts = {} |
|
831 | for name, path in pycompat.iteritems(paths): | |
|
831 | for name, path in paths.items(): | |
|
832 | 832 | doc = _disabledhelp(path) |
|
833 | 833 | if doc and name != b'__index__': |
|
834 | 834 | exts[name] = doc.splitlines()[0] |
@@ -917,7 +917,7 b' def disabledcmd(ui, cmd, strict=False):' | |||
|
917 | 917 | ext = _finddisabledcmd(ui, cmd, cmd, path, strict=strict) |
|
918 | 918 | if not ext: |
|
919 | 919 | # otherwise, interrogate each extension until there's a match |
|
920 | for name, path in pycompat.iteritems(paths): | |
|
920 | for name, path in paths.items(): | |
|
921 | 921 | ext = _finddisabledcmd(ui, cmd, name, path, strict=strict) |
|
922 | 922 | if ext: |
|
923 | 923 | break |
@@ -942,9 +942,7 b' def enabled(shortname=True):' | |||
|
942 | 942 | |
|
943 | 943 | def notloaded(): |
|
944 | 944 | '''return short names of extensions that failed to load''' |
|
945 | return [ | |
|
946 | name for name, mod in pycompat.iteritems(_extensions) if mod is None | |
|
947 | ] | |
|
945 | return [name for name, mod in _extensions.items() if mod is None] | |
|
948 | 946 | |
|
949 | 947 | |
|
950 | 948 | def moduleversion(module): |
@@ -14,7 +14,6 b' from . import (' | |||
|
14 | 14 | commands, |
|
15 | 15 | error, |
|
16 | 16 | extensions, |
|
17 | pycompat, | |
|
18 | 17 | registrar, |
|
19 | 18 | ) |
|
20 | 19 | |
@@ -114,7 +113,7 b' class exthelper(object):' | |||
|
114 | 113 | self._extcommandwrappers.extend(other._extcommandwrappers) |
|
115 | 114 | self._functionwrappers.extend(other._functionwrappers) |
|
116 | 115 | self.cmdtable.update(other.cmdtable) |
|
117 | for section, items in pycompat.iteritems(other.configtable): | |
|
116 | for section, items in other.configtable.items(): | |
|
118 | 117 | if section in self.configtable: |
|
119 | 118 | self.configtable[section].update(items) |
|
120 | 119 | else: |
@@ -1199,7 +1199,7 b' def _workingpath(repo, ctx):' | |||
|
1199 | 1199 | |
|
1200 | 1200 | def loadinternalmerge(ui, extname, registrarobj): |
|
1201 | 1201 | """Load internal merge tool from specified registrarobj""" |
|
1202 | for name, func in pycompat.iteritems(registrarobj._table): | |
|
1202 | for name, func in registrarobj._table.items(): | |
|
1203 | 1203 | fullname = b':' + name |
|
1204 | 1204 | internals[fullname] = func |
|
1205 | 1205 | internals[b'internal:' + name] = func |
@@ -613,7 +613,7 b' def match(ctx, cwd, expr, badfn=None):' | |||
|
613 | 613 | |
|
614 | 614 | def loadpredicate(ui, extname, registrarobj): |
|
615 | 615 | """Load fileset predicates from specified registrarobj""" |
|
616 | for name, func in pycompat.iteritems(registrarobj._table): | |
|
616 | for name, func in registrarobj._table.items(): | |
|
617 | 617 | symbols[name] = func |
|
618 | 618 | |
|
619 | 619 |
@@ -293,7 +293,7 b' class _nestedformatter(baseformatter):' | |||
|
293 | 293 | def _iteritems(data): |
|
294 | 294 | '''iterate key-value pairs in stable order''' |
|
295 | 295 | if isinstance(data, dict): |
|
296 | return sorted(pycompat.iteritems(data)) | |
|
296 | return sorted(data.items()) | |
|
297 | 297 | return data |
|
298 | 298 | |
|
299 | 299 |
@@ -125,7 +125,7 b' def listexts(header, exts, indent=1, sho' | |||
|
125 | 125 | '''return a text listing of the given extensions''' |
|
126 | 126 | rst = [] |
|
127 | 127 | if exts: |
|
128 | for name, desc in sorted(pycompat.iteritems(exts)): | |
|
128 | for name, desc in sorted(exts.items()): | |
|
129 | 129 | if not showdeprecated and any(w in desc for w in _exclkeywords): |
|
130 | 130 | continue |
|
131 | 131 | rst.append(b'%s:%s: %s\n' % (b' ' * indent, name, desc)) |
@@ -280,7 +280,7 b' def topicmatch(ui, commands, kw):' | |||
|
280 | 280 | name = names[0] |
|
281 | 281 | if not filtertopic(ui, name): |
|
282 | 282 | results[b'topics'].append((names[0], header)) |
|
283 | for cmd, entry in pycompat.iteritems(commands.table): | |
|
283 | for cmd, entry in commands.table.items(): | |
|
284 | 284 | if len(entry) == 3: |
|
285 | 285 | summary = entry[2] |
|
286 | 286 | else: |
@@ -664,7 +664,7 b' def _getcategorizedhelpcmds(ui, cmdtable' | |||
|
664 | 664 | h = {} |
|
665 | 665 | # Command -> string showing synonyms |
|
666 | 666 | syns = {} |
|
667 | for c, e in pycompat.iteritems(cmdtable): | |
|
667 | for c, e in cmdtable.items(): | |
|
668 | 668 | fs = cmdutil.parsealiases(c) |
|
669 | 669 | f = fs[0] |
|
670 | 670 | syns[f] = fs |
@@ -412,7 +412,7 b' class hgweb(object):' | |||
|
412 | 412 | |
|
413 | 413 | if cmd == b'archive': |
|
414 | 414 | fn = req.qsparams[b'node'] |
|
415 | for type_, spec in pycompat.iteritems(webutil.archivespecs): | |
|
415 | for type_, spec in webutil.archivespecs.items(): | |
|
416 | 416 | ext = spec[2] |
|
417 | 417 | if fn.endswith(ext): |
|
418 | 418 | req.qsparams[b'node'] = fn[: -len(ext)] |
@@ -77,7 +77,7 b' class multidict(object):' | |||
|
77 | 77 | return vals[0] |
|
78 | 78 | |
|
79 | 79 | def asdictoflists(self): |
|
80 | return {k: list(v) for k, v in pycompat.iteritems(self._items)} | |
|
80 | return {k: list(v) for k, v in self._items.items()} | |
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | @attr.s(frozen=True) |
@@ -175,7 +175,7 b' def parserequestfromenv(env, reponame=No' | |||
|
175 | 175 | # This is what is documented to be used for os.environ on Unix. |
|
176 | 176 | return pycompat.fsencode(s) |
|
177 | 177 | |
|
178 | env = {tobytes(k): tobytes(v) for k, v in pycompat.iteritems(env)} | |
|
178 | env = {tobytes(k): tobytes(v) for k, v in env.items()} | |
|
179 | 179 | |
|
180 | 180 | # Some hosting solutions are emulating hgwebdir, and dispatching directly |
|
181 | 181 | # to an hgweb instance using this environment variable. This was always |
@@ -309,7 +309,7 b' def parserequestfromenv(env, reponame=No' | |||
|
309 | 309 | # perform case normalization for us. We just rewrite underscore to dash |
|
310 | 310 | # so keys match what likely went over the wire. |
|
311 | 311 | headers = [] |
|
312 | for k, v in pycompat.iteritems(env): | |
|
312 | for k, v in env.items(): | |
|
313 | 313 | if k.startswith(b'HTTP_'): |
|
314 | 314 | headers.append((k[len(b'HTTP_') :].replace(b'_', b'-'), v)) |
|
315 | 315 |
@@ -563,7 +563,7 b' def manifest(web):' | |||
|
563 | 563 | l = len(path) |
|
564 | 564 | abspath = b"/" + path |
|
565 | 565 | |
|
566 | for full, n in pycompat.iteritems(mf): | |
|
566 | for full, n in mf.items(): | |
|
567 | 567 | # the virtual path (working copy path) used for the full |
|
568 | 568 | # (repository) path |
|
569 | 569 | f = decodepath(full) |
@@ -1520,7 +1520,7 b' def help(web):' | |||
|
1520 | 1520 | |
|
1521 | 1521 | early, other = [], [] |
|
1522 | 1522 | primary = lambda s: s.partition(b'|')[0] |
|
1523 | for c, e in pycompat.iteritems(commands.table): | |
|
1523 | for c, e in commands.table.items(): | |
|
1524 | 1524 | doc = _getdoc(e) |
|
1525 | 1525 | if b'DEPRECATED' in doc or c.startswith(b'debug'): |
|
1526 | 1526 | continue |
@@ -56,7 +56,7 b' def archivelist(ui, nodeid, url=None):' | |||
|
56 | 56 | allowed = ui.configlist(b'web', b'allow-archive', untrusted=True) |
|
57 | 57 | archives = [] |
|
58 | 58 | |
|
59 | for typ, spec in pycompat.iteritems(archivespecs): | |
|
59 | for typ, spec in archivespecs.items(): | |
|
60 | 60 | if typ in allowed or ui.configbool( |
|
61 | 61 | b'web', b'allow' + typ, untrusted=True |
|
62 | 62 | ): |
@@ -863,7 +863,7 b' class sessionvars(templateutil.wrapped):' | |||
|
863 | 863 | |
|
864 | 864 | def itermaps(self, context): |
|
865 | 865 | separator = self._start |
|
866 | for key, value in sorted(pycompat.iteritems(self._vars)): | |
|
866 | for key, value in sorted(self._vars.items()): | |
|
867 | 867 | yield { |
|
868 | 868 | b'name': key, |
|
869 | 869 | b'value': pycompat.bytestr(value), |
@@ -23,7 +23,7 b' def launch(application):' | |||
|
23 | 23 | procutil.setbinary(procutil.stdin) |
|
24 | 24 | procutil.setbinary(procutil.stdout) |
|
25 | 25 | |
|
26 | environ = dict(pycompat.iteritems(os.environ)) # re-exports | |
|
26 | environ = dict(os.environ.items()) # re-exports | |
|
27 | 27 | environ.setdefault('PATH_INFO', '') |
|
28 | 28 | if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): |
|
29 | 29 | # IIS includes script_name in PATH_INFO |
@@ -166,7 +166,7 b' def _exthook(ui, repo, htype, name, cmd,' | |||
|
166 | 166 | else: |
|
167 | 167 | env[b'HGPLAIN'] = b'' |
|
168 | 168 | |
|
169 | for k, v in pycompat.iteritems(args): | |
|
169 | for k, v in args.items(): | |
|
170 | 170 | # transaction changes can accumulate MBs of data, so skip it |
|
171 | 171 | # for external hooks |
|
172 | 172 | if k == b'changes': |
@@ -93,7 +93,7 b' def readauthforuri(ui, uri, user):' | |||
|
93 | 93 | bestuser = None |
|
94 | 94 | bestlen = 0 |
|
95 | 95 | bestauth = None |
|
96 | for group, auth in pycompat.iteritems(groups): | |
|
96 | for group, auth in groups.items(): | |
|
97 | 97 | if user and user != auth.get(b'username', user): |
|
98 | 98 | # If a username was set in the URI, the entry username |
|
99 | 99 | # must either match it or be unset |
@@ -2068,7 +2068,7 b' class localrepository(object):' | |||
|
2068 | 2068 | else: |
|
2069 | 2069 | tags = self._tagscache.tags |
|
2070 | 2070 | rev = self.changelog.rev |
|
2071 | for k, v in pycompat.iteritems(tags): | |
|
2071 | for k, v in tags.items(): | |
|
2072 | 2072 | try: |
|
2073 | 2073 | # ignore tags to unknown nodes |
|
2074 | 2074 | rev(v) |
@@ -2103,13 +2103,12 b' class localrepository(object):' | |||
|
2103 | 2103 | # writing to the cache), but the rest of Mercurial wants them in |
|
2104 | 2104 | # local encoding. |
|
2105 | 2105 | tags = {} |
|
2106 | for (name, (node, hist)) in pycompat.iteritems(alltags): | |
|
2106 | for (name, (node, hist)) in alltags.items(): | |
|
2107 | 2107 | if node != self.nullid: |
|
2108 | 2108 | tags[encoding.tolocal(name)] = node |
|
2109 | 2109 | tags[b'tip'] = self.changelog.tip() |
|
2110 | 2110 | tagtypes = { |
|
2111 | encoding.tolocal(name): value | |
|
2112 | for (name, value) in pycompat.iteritems(tagtypes) | |
|
2111 | encoding.tolocal(name): value for (name, value) in tagtypes.items() | |
|
2113 | 2112 | } |
|
2114 | 2113 | return (tags, tagtypes) |
|
2115 | 2114 | |
@@ -2138,7 +2137,7 b' class localrepository(object):' | |||
|
2138 | 2137 | '''return the tags associated with a node''' |
|
2139 | 2138 | if not self._tagscache.nodetagscache: |
|
2140 | 2139 | nodetagscache = {} |
|
2141 | for t, n in pycompat.iteritems(self._tagscache.tags): | |
|
2140 | for t, n in self._tagscache.tags.items(): | |
|
2142 | 2141 | nodetagscache.setdefault(n, []).append(t) |
|
2143 | 2142 | for tags in pycompat.itervalues(nodetagscache): |
|
2144 | 2143 | tags.sort() |
@@ -2256,7 +2255,7 b' class localrepository(object):' | |||
|
2256 | 2255 | mf = matchmod.match(self.root, b'', [pat]) |
|
2257 | 2256 | fn = None |
|
2258 | 2257 | params = cmd |
|
2259 | for name, filterfn in pycompat.iteritems(self._datafilters): | |
|
2258 | for name, filterfn in self._datafilters.items(): | |
|
2260 | 2259 | if cmd.startswith(name): |
|
2261 | 2260 | fn = filterfn |
|
2262 | 2261 | params = cmd[len(name) :].lstrip() |
@@ -327,7 +327,7 b' class changesetprinter(object):' | |||
|
327 | 327 | if branch != b'default': |
|
328 | 328 | self.ui.write(columns[b'branch'] % branch, label=b'log.branch') |
|
329 | 329 | |
|
330 | for nsname, ns in pycompat.iteritems(self.repo.names): | |
|
330 | for nsname, ns in self.repo.names.items(): | |
|
331 | 331 | # branches has special logic already handled above, so here we just |
|
332 | 332 | # skip it |
|
333 | 333 | if nsname == b'branches': |
@@ -991,7 +991,7 b' def _makerevset(repo, wopts, slowpath):' | |||
|
991 | 991 | opts[b'_patslog'] = list(wopts.pats) |
|
992 | 992 | |
|
993 | 993 | expr = [] |
|
994 | for op, val in sorted(pycompat.iteritems(opts)): | |
|
994 | for op, val in sorted(opts.items()): | |
|
995 | 995 | if not val: |
|
996 | 996 | continue |
|
997 | 997 | revop, listop = _opt2logrevset[op] |
@@ -10,7 +10,6 b'' | |||
|
10 | 10 | from .node import hex |
|
11 | 11 | |
|
12 | 12 | from . import ( |
|
13 | pycompat, | |
|
14 | 13 | util, |
|
15 | 14 | vfs as vfsmod, |
|
16 | 15 | ) |
@@ -77,7 +76,7 b' def writeremotenamefile(repo, remotepath' | |||
|
77 | 76 | if oldpath != remotepath: |
|
78 | 77 | f.write(b'%s\0%s\0%s\n' % (node, oldpath, rname)) |
|
79 | 78 | |
|
80 | for name, node in sorted(pycompat.iteritems(names)): | |
|
79 | for name, node in sorted(names.items()): | |
|
81 | 80 | if nametype == b"branches": |
|
82 | 81 | for n in node: |
|
83 | 82 | f.write(b'%s\0%s\0%s\n' % (n, remotepath, name)) |
@@ -159,7 +158,7 b' def pullremotenames(localrepo, remoterep' | |||
|
159 | 158 | with remoterepo.commandexecutor() as e: |
|
160 | 159 | branchmap = e.callcommand(b'branchmap', {}).result() |
|
161 | 160 | |
|
162 | for branch, nodes in pycompat.iteritems(branchmap): | |
|
161 | for branch, nodes in branchmap.items(): | |
|
163 | 162 | bmap[branch] = [] |
|
164 | 163 | for node in nodes: |
|
165 | 164 | if node in repo and not repo[node].obsolete(): |
@@ -2,7 +2,6 b' import _lsprof' | |||
|
2 | 2 | import sys |
|
3 | 3 | |
|
4 | 4 | from .pycompat import getattr |
|
5 | from . import pycompat | |
|
6 | 5 | |
|
7 | 6 | Profiler = _lsprof.Profiler |
|
8 | 7 | |
@@ -124,7 +123,7 b' def label(code):' | |||
|
124 | 123 | try: |
|
125 | 124 | mname = _fn2mod[code.co_filename] |
|
126 | 125 | except KeyError: |
|
127 | for k, v in list(pycompat.iteritems(sys.modules)): | |
|
126 | for k, v in list(sys.modules.items()): | |
|
128 | 127 | if v is None: |
|
129 | 128 | continue |
|
130 | 129 | if not isinstance(getattr(v, '__file__', None), str): |
@@ -867,11 +867,11 b' class treemanifest(object):' | |||
|
867 | 867 | differs, load it in both |
|
868 | 868 | """ |
|
869 | 869 | toloadlazy = [] |
|
870 | for d, v1 in pycompat.iteritems(t1._lazydirs): | |
|
870 | for d, v1 in t1._lazydirs.items(): | |
|
871 | 871 | v2 = t2._lazydirs.get(d) |
|
872 | 872 | if not v2 or v2[0] != v1[0]: |
|
873 | 873 | toloadlazy.append(d) |
|
874 | for d, v1 in pycompat.iteritems(t2._lazydirs): | |
|
874 | for d, v1 in t2._lazydirs.items(): | |
|
875 | 875 | if d not in t1._lazydirs: |
|
876 | 876 | toloadlazy.append(d) |
|
877 | 877 | |
@@ -953,7 +953,7 b' class treemanifest(object):' | |||
|
953 | 953 | if p in self._files: |
|
954 | 954 | yield self._subpath(p), n |
|
955 | 955 | else: |
|
956 | for f, sn in pycompat.iteritems(n): | |
|
956 | for f, sn in n.items(): | |
|
957 | 957 | yield f, sn |
|
958 | 958 | |
|
959 | 959 | iteritems = items |
@@ -1104,11 +1104,10 b' class treemanifest(object):' | |||
|
1104 | 1104 | def _copyfunc(s): |
|
1105 | 1105 | self._load() |
|
1106 | 1106 | s._lazydirs = { |
|
1107 | d: (n, r, True) | |
|
1108 | for d, (n, r, c) in pycompat.iteritems(self._lazydirs) | |
|
1107 | d: (n, r, True) for d, (n, r, c) in self._lazydirs.items() | |
|
1109 | 1108 | } |
|
1110 | 1109 | sdirs = s._dirs |
|
1111 | for d, v in pycompat.iteritems(self._dirs): | |
|
1110 | for d, v in self._dirs.items(): | |
|
1112 | 1111 | sdirs[d] = v.copy() |
|
1113 | 1112 | s._files = dict.copy(self._files) |
|
1114 | 1113 | s._flags = dict.copy(self._flags) |
@@ -1136,7 +1135,7 b' class treemanifest(object):' | |||
|
1136 | 1135 | t1._load() |
|
1137 | 1136 | t2._load() |
|
1138 | 1137 | self._loaddifflazy(t1, t2) |
|
1139 | for d, m1 in pycompat.iteritems(t1._dirs): | |
|
1138 | for d, m1 in t1._dirs.items(): | |
|
1140 | 1139 | if d in t2._dirs: |
|
1141 | 1140 | m2 = t2._dirs[d] |
|
1142 | 1141 | _filesnotin(m1, m2) |
@@ -1249,7 +1248,7 b' class treemanifest(object):' | |||
|
1249 | 1248 | ret._flags[fn] = self._flags[fn] |
|
1250 | 1249 | |
|
1251 | 1250 | visit = self._loadchildrensetlazy(visit) |
|
1252 | for dir, subm in pycompat.iteritems(self._dirs): | |
|
1251 | for dir, subm in self._dirs.items(): | |
|
1253 | 1252 | if visit and dir[:-1] not in visit: |
|
1254 | 1253 | continue |
|
1255 | 1254 | m = subm._matches_inner(match) |
@@ -1294,15 +1293,15 b' class treemanifest(object):' | |||
|
1294 | 1293 | t2._load() |
|
1295 | 1294 | self._loaddifflazy(t1, t2) |
|
1296 | 1295 | |
|
1297 | for d, m1 in pycompat.iteritems(t1._dirs): | |
|
1296 | for d, m1 in t1._dirs.items(): | |
|
1298 | 1297 | m2 = t2._dirs.get(d, emptytree) |
|
1299 | 1298 | stack.append((m1, m2)) |
|
1300 | 1299 | |
|
1301 | for d, m2 in pycompat.iteritems(t2._dirs): | |
|
1300 | for d, m2 in t2._dirs.items(): | |
|
1302 | 1301 | if d not in t1._dirs: |
|
1303 | 1302 | stack.append((emptytree, m2)) |
|
1304 | 1303 | |
|
1305 | for fn, n1 in pycompat.iteritems(t1._files): | |
|
1304 | for fn, n1 in t1._files.items(): | |
|
1306 | 1305 | fl1 = t1._flags.get(fn, b'') |
|
1307 | 1306 | n2 = t2._files.get(fn, None) |
|
1308 | 1307 | fl2 = t2._flags.get(fn, b'') |
@@ -1311,7 +1310,7 b' class treemanifest(object):' | |||
|
1311 | 1310 | elif clean: |
|
1312 | 1311 | result[t1._subpath(fn)] = None |
|
1313 | 1312 | |
|
1314 | for fn, n2 in pycompat.iteritems(t2._files): | |
|
1313 | for fn, n2 in t2._files.items(): | |
|
1315 | 1314 | if fn not in t1._files: |
|
1316 | 1315 | fl2 = t2._flags.get(fn, b'') |
|
1317 | 1316 | result[t2._subpath(fn)] = ((None, b''), (n2, fl2)) |
@@ -1361,9 +1360,7 b' class treemanifest(object):' | |||
|
1361 | 1360 | """ |
|
1362 | 1361 | self._load() |
|
1363 | 1362 | flags = self.flags |
|
1364 | lazydirs = [ | |
|
1365 | (d[:-1], v[0], b't') for d, v in pycompat.iteritems(self._lazydirs) | |
|
1366 | ] | |
|
1363 | lazydirs = [(d[:-1], v[0], b't') for d, v in self._lazydirs.items()] | |
|
1367 | 1364 | dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs] |
|
1368 | 1365 | files = [(f, self._files[f], flags(f)) for f in self._files] |
|
1369 | 1366 | return _text(sorted(dirs + files + lazydirs)) |
@@ -1392,7 +1389,7 b' class treemanifest(object):' | |||
|
1392 | 1389 | visit = self._loadchildrensetlazy(visit) |
|
1393 | 1390 | if visit == b'this' or visit == b'all': |
|
1394 | 1391 | visit = None |
|
1395 | for d, subm in pycompat.iteritems(self._dirs): | |
|
1392 | for d, subm in self._dirs.items(): | |
|
1396 | 1393 | if visit and d[:-1] not in visit: |
|
1397 | 1394 | continue |
|
1398 | 1395 | subp1 = getnode(m1, d) |
@@ -1415,7 +1412,7 b' class treemanifest(object):' | |||
|
1415 | 1412 | self._load() |
|
1416 | 1413 | # OPT: use visitchildrenset to avoid loading everything. |
|
1417 | 1414 | self._loadalllazy() |
|
1418 | for d, subm in pycompat.iteritems(self._dirs): | |
|
1415 | for d, subm in self._dirs.items(): | |
|
1419 | 1416 | for subtree in subm.walksubtrees(matcher=matcher): |
|
1420 | 1417 | yield subtree |
|
1421 | 1418 |
@@ -1638,7 +1638,7 b' def readpatternfile(filepath, warn, sour' | |||
|
1638 | 1638 | continue |
|
1639 | 1639 | |
|
1640 | 1640 | linesyntax = syntax |
|
1641 | for s, rels in pycompat.iteritems(syntaxes): | |
|
1641 | for s, rels in syntaxes.items(): | |
|
1642 | 1642 | if line.startswith(rels): |
|
1643 | 1643 | linesyntax = rels |
|
1644 | 1644 | line = line[len(rels) :] |
@@ -643,10 +643,10 b' class mergeresult(object):' | |||
|
643 | 643 | |
|
644 | 644 | def filemap(self, sort=False): |
|
645 | 645 | if sorted: |
|
646 | for key, val in sorted(pycompat.iteritems(self._filemapping)): | |
|
646 | for key, val in sorted(self._filemapping.items()): | |
|
647 | 647 | yield key, val |
|
648 | 648 | else: |
|
649 | for key, val in pycompat.iteritems(self._filemapping): | |
|
649 | for key, val in self._filemapping.items(): | |
|
650 | 650 | yield key, val |
|
651 | 651 | |
|
652 | 652 | def addcommitinfo(self, filename, key, value): |
@@ -671,15 +671,15 b' class mergeresult(object):' | |||
|
671 | 671 | """returns a dictionary of actions to be perfomed with action as key |
|
672 | 672 | and a list of files and related arguments as values""" |
|
673 | 673 | res = collections.defaultdict(list) |
|
674 | for a, d in pycompat.iteritems(self._actionmapping): | |
|
675 | for f, (args, msg) in pycompat.iteritems(d): | |
|
674 | for a, d in self._actionmapping.items(): | |
|
675 | for f, (args, msg) in d.items(): | |
|
676 | 676 | res[a].append((f, args, msg)) |
|
677 | 677 | return res |
|
678 | 678 | |
|
679 | 679 | def setactions(self, actions): |
|
680 | 680 | self._filemapping = actions |
|
681 | 681 | self._actionmapping = collections.defaultdict(dict) |
|
682 | for f, (act, data, msg) in pycompat.iteritems(self._filemapping): | |
|
682 | for f, (act, data, msg) in self._filemapping.items(): | |
|
683 | 683 | self._actionmapping[act][f] = data, msg |
|
684 | 684 | |
|
685 | 685 | def hasconflicts(self): |
@@ -786,7 +786,7 b' def manifestmerge(' | |||
|
786 | 786 | relevantfiles = set(ma.diff(m2).keys()) |
|
787 | 787 | |
|
788 | 788 | # For copied and moved files, we need to add the source file too. |
|
789 | for copykey, copyvalue in pycompat.iteritems(branch_copies1.copy): | |
|
789 | for copykey, copyvalue in branch_copies1.copy.items(): | |
|
790 | 790 | if copyvalue in relevantfiles: |
|
791 | 791 | relevantfiles.add(copykey) |
|
792 | 792 | for movedirkey in branch_copies1.movewithdir: |
@@ -796,7 +796,7 b' def manifestmerge(' | |||
|
796 | 796 | |
|
797 | 797 | diff = m1.diff(m2, match=matcher) |
|
798 | 798 | |
|
799 | for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff): | |
|
799 | for f, ((n1, fl1), (n2, fl2)) in diff.items(): | |
|
800 | 800 | if n1 and n2: # file exists on both local and remote side |
|
801 | 801 | if f not in ma: |
|
802 | 802 | # TODO: what if they're renamed from different sources? |
@@ -1511,7 +1511,7 b' def applyupdates(' | |||
|
1511 | 1511 | ms = wctx.mergestate(clean=True) |
|
1512 | 1512 | ms.start(wctx.p1().node(), mctx.node(), labels) |
|
1513 | 1513 | |
|
1514 | for f, op in pycompat.iteritems(mresult.commitinfo): | |
|
1514 | for f, op in mresult.commitinfo.items(): | |
|
1515 | 1515 | # the other side of filenode was choosen while merging, store this in |
|
1516 | 1516 | # mergestate so that it can be reused on commit |
|
1517 | 1517 | ms.addcommitinfo(f, op) |
@@ -2072,7 +2072,7 b' def _update(' | |||
|
2072 | 2072 | _checkcollision(repo, wc.manifest(), mresult) |
|
2073 | 2073 | |
|
2074 | 2074 | # divergent renames |
|
2075 | for f, fl in sorted(pycompat.iteritems(mresult.diverge)): | |
|
2075 | for f, fl in sorted(mresult.diverge.items()): | |
|
2076 | 2076 | repo.ui.warn( |
|
2077 | 2077 | _( |
|
2078 | 2078 | b"note: possible conflict - %s was renamed " |
@@ -2084,7 +2084,7 b' def _update(' | |||
|
2084 | 2084 | repo.ui.warn(b" %s\n" % nf) |
|
2085 | 2085 | |
|
2086 | 2086 | # rename and delete |
|
2087 | for f, fl in sorted(pycompat.iteritems(mresult.renamedelete)): | |
|
2087 | for f, fl in sorted(mresult.renamedelete.items()): | |
|
2088 | 2088 | repo.ui.warn( |
|
2089 | 2089 | _( |
|
2090 | 2090 | b"note: possible conflict - %s was deleted " |
@@ -2124,7 +2124,7 b' def _update(' | |||
|
2124 | 2124 | |
|
2125 | 2125 | if updatedirstate: |
|
2126 | 2126 | if extraactions: |
|
2127 | for k, acts in pycompat.iteritems(extraactions): | |
|
2127 | for k, acts in extraactions.items(): | |
|
2128 | 2128 | for a in acts: |
|
2129 | 2129 | mresult.addfile(a[0], k, *a[1:]) |
|
2130 | 2130 | if k == mergestatemod.ACTION_GET and wantfiledata: |
@@ -2195,10 +2195,10 b' def _update(' | |||
|
2195 | 2195 | getfiledata = None |
|
2196 | 2196 | else: |
|
2197 | 2197 | now_sec = now[0] |
|
2198 | for f, m in pycompat.iteritems(getfiledata): | |
|
2198 | for f, m in getfiledata.items(): | |
|
2199 | 2199 | if m is not None and m[2][0] >= now_sec: |
|
2200 | 2200 | ambiguous_mtime[f] = (m[0], m[1], None) |
|
2201 | for f, m in pycompat.iteritems(ambiguous_mtime): | |
|
2201 | for f, m in ambiguous_mtime.items(): | |
|
2202 | 2202 | getfiledata[f] = m |
|
2203 | 2203 | |
|
2204 | 2204 | repo.setparents(fp1, fp2) |
@@ -363,7 +363,7 b' class _mergestate_base(object):' | |||
|
363 | 363 | def unresolved(self): |
|
364 | 364 | """Obtain the paths of unresolved files.""" |
|
365 | 365 | |
|
366 | for f, entry in pycompat.iteritems(self._state): | |
|
366 | for f, entry in self._state.items(): | |
|
367 | 367 | if entry[0] in ( |
|
368 | 368 | MERGE_RECORD_UNRESOLVED, |
|
369 | 369 | MERGE_RECORD_UNRESOLVED_PATH, |
@@ -490,7 +490,7 b' class _mergestate_base(object):' | |||
|
490 | 490 | ACTION_ADD_MODIFIED: [], |
|
491 | 491 | ACTION_GET: [], |
|
492 | 492 | } |
|
493 | for f, (r, action) in pycompat.iteritems(self._results): | |
|
493 | for f, (r, action) in self._results.items(): | |
|
494 | 494 | if action is not None: |
|
495 | 495 | actions[action].append((f, None, b"merge result")) |
|
496 | 496 | return actions |
@@ -690,7 +690,7 b' class mergestate(_mergestate_base):' | |||
|
690 | 690 | # the type of state that is stored, and capital-letter records are used |
|
691 | 691 | # to prevent older versions of Mercurial that do not support the feature |
|
692 | 692 | # from loading them. |
|
693 | for filename, v in pycompat.iteritems(self._state): | |
|
693 | for filename, v in self._state.items(): | |
|
694 | 694 | if v[0] in ( |
|
695 | 695 | MERGE_RECORD_UNRESOLVED_PATH, |
|
696 | 696 | MERGE_RECORD_RESOLVED_PATH, |
@@ -714,9 +714,9 b' class mergestate(_mergestate_base):' | |||
|
714 | 714 | else: |
|
715 | 715 | # Normal files. These are stored in 'F' records. |
|
716 | 716 | records.append((RECORD_MERGED, b'\0'.join([filename] + v))) |
|
717 | for filename, extras in sorted(pycompat.iteritems(self._stateextras)): | |
|
717 | for filename, extras in sorted(self._stateextras.items()): | |
|
718 | 718 | rawextras = b'\0'.join( |
|
719 | b'%s\0%s' % (k, v) for k, v in pycompat.iteritems(extras) | |
|
719 | b'%s\0%s' % (k, v) for k, v in extras.items() | |
|
720 | 720 | ) |
|
721 | 721 | records.append( |
|
722 | 722 | (RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras)) |
@@ -1,6 +1,5 b'' | |||
|
1 | 1 | from .i18n import _ |
|
2 | 2 | from . import ( |
|
3 | pycompat, | |
|
4 | 3 | registrar, |
|
5 | 4 | templatekw, |
|
6 | 5 | util, |
@@ -85,7 +84,7 b' class namespaces(object):' | |||
|
85 | 84 | return self._names.get(namespace, default) |
|
86 | 85 | |
|
87 | 86 | def items(self): |
|
88 | return pycompat.iteritems(self._names) | |
|
87 | return self._names.items() | |
|
89 | 88 | |
|
90 | 89 | iteritems = items |
|
91 | 90 | |
@@ -118,7 +117,7 b' class namespaces(object):' | |||
|
118 | 117 | |
|
119 | 118 | Raises a KeyError if there is no such node. |
|
120 | 119 | """ |
|
121 | for ns, v in pycompat.iteritems(self._names): | |
|
120 | for ns, v in self._names.items(): | |
|
122 | 121 | n = v.singlenode(repo, name) |
|
123 | 122 | if n: |
|
124 | 123 | return n |
@@ -248,7 +248,7 b' def _fm0readmarkers(data, off, stop):' | |||
|
248 | 248 | # if content cannot be translated to nodeid drop the data. |
|
249 | 249 | parents = None |
|
250 | 250 | |
|
251 | metadata = tuple(sorted(pycompat.iteritems(metadata))) | |
|
251 | metadata = tuple(sorted(metadata.items())) | |
|
252 | 252 | |
|
253 | 253 | yield (pre, sucs, flags, metadata, date, parents) |
|
254 | 254 | |
@@ -278,7 +278,7 b' def _fm0encodemeta(meta):' | |||
|
278 | 278 | """Return encoded metadata string to string mapping. |
|
279 | 279 | |
|
280 | 280 | Assume no ':' in key and no '\0' in both key and value.""" |
|
281 | for key, value in pycompat.iteritems(meta): | |
|
281 | for key, value in meta.items(): | |
|
282 | 282 | if b':' in key or b'\0' in key: |
|
283 | 283 | raise ValueError(b"':' and '\0' are forbidden in metadata key'") |
|
284 | 284 | if b'\0' in value: |
@@ -652,7 +652,7 b' class obsstore(object):' | |||
|
652 | 652 | 'in-marker cycle with %s' % pycompat.sysstr(hex(prec)) |
|
653 | 653 | ) |
|
654 | 654 | |
|
655 | metadata = tuple(sorted(pycompat.iteritems(metadata))) | |
|
655 | metadata = tuple(sorted(metadata.items())) | |
|
656 | 656 | for k, v in metadata: |
|
657 | 657 | try: |
|
658 | 658 | # might be better to reject non-ASCII keys |
@@ -18,7 +18,6 b' from . import (' | |||
|
18 | 18 | encoding, |
|
19 | 19 | error, |
|
20 | 20 | phases, |
|
21 | pycompat, | |
|
22 | 21 | util, |
|
23 | 22 | ) |
|
24 | 23 | from .utils import dateutil |
@@ -997,7 +996,7 b' def divergentsets(repo, ctx):' | |||
|
997 | 996 | base[tuple(nsuccset)] = n |
|
998 | 997 | return [ |
|
999 | 998 | {b'divergentnodes': divset, b'commonpredecessor': b} |
|
1000 | for divset, b in pycompat.iteritems(base) | |
|
999 | for divset, b in base.items() | |
|
1001 | 1000 | ] |
|
1002 | 1001 | |
|
1003 | 1002 |
@@ -2643,11 +2643,7 b' def diffhunks(' | |||
|
2643 | 2643 | if copysourcematch: |
|
2644 | 2644 | # filter out copies where source side isn't inside the matcher |
|
2645 | 2645 | # (copies.pathcopies() already filtered out the destination) |
|
2646 | copy = { | |
|
2647 | dst: src | |
|
2648 | for dst, src in pycompat.iteritems(copy) | |
|
2649 | if copysourcematch(src) | |
|
2650 | } | |
|
2646 | copy = {dst: src for dst, src in copy.items() if copysourcematch(src)} | |
|
2651 | 2647 | |
|
2652 | 2648 | modifiedset = set(modified) |
|
2653 | 2649 | addedset = set(added) |
@@ -324,7 +324,7 b' class dirs(object):' | |||
|
324 | 324 | self._dirs = {} |
|
325 | 325 | addpath = self.addpath |
|
326 | 326 | if isinstance(map, dict) and only_tracked: |
|
327 | for f, s in pycompat.iteritems(map): | |
|
327 | for f, s in map.items(): | |
|
328 | 328 | if s.state != b'r': |
|
329 | 329 | addpath(f) |
|
330 | 330 | elif only_tracked: |
@@ -219,7 +219,7 b' def binaryencode(phasemapping):' | |||
|
219 | 219 | The revision lists are encoded as (phase, root) pairs. |
|
220 | 220 | """ |
|
221 | 221 | binarydata = [] |
|
222 | for phase, nodes in pycompat.iteritems(phasemapping): | |
|
222 | for phase, nodes in phasemapping.items(): | |
|
223 | 223 | for head in nodes: |
|
224 | 224 | binarydata.append(_fphasesentry.pack(phase, head)) |
|
225 | 225 | return b''.join(binarydata) |
@@ -363,9 +363,7 b' class phasecache(object):' | |||
|
363 | 363 | self.invalidate() |
|
364 | 364 | self.loadphaserevs(repo) |
|
365 | 365 | return any( |
|
366 | revs | |
|
367 | for phase, revs in pycompat.iteritems(self.phaseroots) | |
|
368 | if phase != public | |
|
366 | revs for phase, revs in self.phaseroots.items() if phase != public | |
|
369 | 367 | ) |
|
370 | 368 | |
|
371 | 369 | def nonpublicphaseroots(self, repo): |
@@ -383,7 +381,7 b' class phasecache(object):' | |||
|
383 | 381 | return set().union( |
|
384 | 382 | *[ |
|
385 | 383 | revs |
|
386 | for phase, revs in pycompat.iteritems(self.phaseroots) | |
|
384 | for phase, revs in self.phaseroots.items() | |
|
387 | 385 | if phase != public |
|
388 | 386 | ] |
|
389 | 387 | ) |
@@ -528,7 +526,7 b' class phasecache(object):' | |||
|
528 | 526 | f.close() |
|
529 | 527 | |
|
530 | 528 | def _write(self, fp): |
|
531 | for phase, roots in pycompat.iteritems(self.phaseroots): | |
|
529 | for phase, roots in self.phaseroots.items(): | |
|
532 | 530 | for h in sorted(roots): |
|
533 | 531 | fp.write(b'%i %s\n' % (phase, hex(h))) |
|
534 | 532 | self.dirty = False |
@@ -612,7 +610,7 b' class phasecache(object):' | |||
|
612 | 610 | def retractboundary(self, repo, tr, targetphase, nodes): |
|
613 | 611 | oldroots = { |
|
614 | 612 | phase: revs |
|
615 | for phase, revs in pycompat.iteritems(self.phaseroots) | |
|
613 | for phase, revs in self.phaseroots.items() | |
|
616 | 614 | if phase <= targetphase |
|
617 | 615 | } |
|
618 | 616 | if tr is None: |
@@ -690,7 +688,7 b' class phasecache(object):' | |||
|
690 | 688 | """ |
|
691 | 689 | filtered = False |
|
692 | 690 | has_node = repo.changelog.index.has_node # to filter unknown nodes |
|
693 | for phase, nodes in pycompat.iteritems(self.phaseroots): | |
|
691 | for phase, nodes in self.phaseroots.items(): | |
|
694 | 692 | missing = sorted(node for node in nodes if not has_node(node)) |
|
695 | 693 | if missing: |
|
696 | 694 | for mnode in missing: |
@@ -854,7 +852,7 b' def analyzeremotephases(repo, subset, ro' | |||
|
854 | 852 | # build list from dictionary |
|
855 | 853 | draftroots = [] |
|
856 | 854 | has_node = repo.changelog.index.has_node # to filter unknown nodes |
|
857 | for nhex, phase in pycompat.iteritems(roots): | |
|
855 | for nhex, phase in roots.items(): | |
|
858 | 856 | if nhex == b'publishing': # ignore data related to publish option |
|
859 | 857 | continue |
|
860 | 858 | node = bin(nhex) |
@@ -18,7 +18,6 b' from ..node import (' | |||
|
18 | 18 | from ..thirdparty import attr |
|
19 | 19 | from .. import ( |
|
20 | 20 | error, |
|
21 | pycompat, | |
|
22 | 21 | revlogutils, |
|
23 | 22 | util, |
|
24 | 23 | ) |
@@ -959,7 +958,7 b' def pack_dirstate(dmap, copymap, pl):' | |||
|
959 | 958 | cs = stringio() |
|
960 | 959 | write = cs.write |
|
961 | 960 | write(b"".join(pl)) |
|
962 | for f, e in pycompat.iteritems(dmap): | |
|
961 | for f, e in dmap.items(): | |
|
963 | 962 | if f in copymap: |
|
964 | 963 | f = b"%s\0%s" % (f, copymap[f]) |
|
965 | 964 | e = _pack( |
@@ -1305,7 +1305,7 b' class revlog(object):' | |||
|
1305 | 1305 | # But, obviously its parents aren't. |
|
1306 | 1306 | for p in self.parents(n): |
|
1307 | 1307 | heads.pop(p, None) |
|
1308 | heads = [head for head, flag in pycompat.iteritems(heads) if flag] | |
|
1308 | heads = [head for head, flag in heads.items() if flag] | |
|
1309 | 1309 | roots = list(roots) |
|
1310 | 1310 | assert orderedout |
|
1311 | 1311 | assert roots |
@@ -594,7 +594,7 b' def bookmark(repo, subset, x):' | |||
|
594 | 594 | bms.add(repo[bmrev].rev()) |
|
595 | 595 | else: |
|
596 | 596 | matchrevs = set() |
|
597 | for name, bmrev in pycompat.iteritems(repo._bookmarks): | |
|
597 | for name, bmrev in repo._bookmarks.items(): | |
|
598 | 598 | if matcher(name): |
|
599 | 599 | matchrevs.add(bmrev) |
|
600 | 600 | for bmrev in matchrevs: |
@@ -1706,7 +1706,7 b' def named(repo, subset, x):' | |||
|
1706 | 1706 | ) |
|
1707 | 1707 | namespaces.add(repo.names[pattern]) |
|
1708 | 1708 | else: |
|
1709 | for name, ns in pycompat.iteritems(repo.names): | |
|
1709 | for name, ns in repo.names.items(): | |
|
1710 | 1710 | if matcher(name): |
|
1711 | 1711 | namespaces.add(ns) |
|
1712 | 1712 | |
@@ -2803,7 +2803,7 b' def makematcher(tree):' | |||
|
2803 | 2803 | |
|
2804 | 2804 | def loadpredicate(ui, extname, registrarobj): |
|
2805 | 2805 | """Load revset predicates from specified registrarobj""" |
|
2806 | for name, func in pycompat.iteritems(registrarobj._table): | |
|
2806 | for name, func in registrarobj._table.items(): | |
|
2807 | 2807 | symbols[name] = func |
|
2808 | 2808 | if func._safe: |
|
2809 | 2809 | safesymbols.add(name) |
@@ -612,7 +612,7 b' def expandaliases(tree, aliases, warn=No' | |||
|
612 | 612 | tree = _aliasrules.expand(aliases, tree) |
|
613 | 613 | # warn about problematic (but not referred) aliases |
|
614 | 614 | if warn is not None: |
|
615 | for name, alias in sorted(pycompat.iteritems(aliases)): | |
|
615 | for name, alias in sorted(aliases.items()): | |
|
616 | 616 | if alias.error and not alias.warned: |
|
617 | 617 | warn(_(b'warning: %s\n') % (alias.error)) |
|
618 | 618 | alias.warned = True |
@@ -108,7 +108,7 b' def itersubrepos(ctx1, ctx2):' | |||
|
108 | 108 | del subpaths[subpath] |
|
109 | 109 | missing.add(subpath) |
|
110 | 110 | |
|
111 | for subpath, ctx in sorted(pycompat.iteritems(subpaths)): | |
|
111 | for subpath, ctx in sorted(subpaths.items()): | |
|
112 | 112 | yield subpath, ctx.sub(subpath) |
|
113 | 113 | |
|
114 | 114 | # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way, |
@@ -1336,7 +1336,7 b' def _interestingfiles(repo, matcher):' | |||
|
1336 | 1336 | ignored=False, |
|
1337 | 1337 | full=False, |
|
1338 | 1338 | ) |
|
1339 | for abs, st in pycompat.iteritems(walkresults): | |
|
1339 | for abs, st in walkresults.items(): | |
|
1340 | 1340 | entry = dirstate.get_entry(abs) |
|
1341 | 1341 | if (not entry.any_tracked) and audit_path.check(abs): |
|
1342 | 1342 | unknown.append(abs) |
@@ -1383,7 +1383,7 b' def _markchanges(repo, unknown, deleted,' | |||
|
1383 | 1383 | with repo.wlock(): |
|
1384 | 1384 | wctx.forget(deleted) |
|
1385 | 1385 | wctx.add(unknown) |
|
1386 | for new, old in pycompat.iteritems(renames): | |
|
1386 | for new, old in renames.items(): | |
|
1387 | 1387 | wctx.copy(old, new) |
|
1388 | 1388 | |
|
1389 | 1389 | |
@@ -1509,12 +1509,9 b' def movedirstate(repo, newctx, match=Non' | |||
|
1509 | 1509 | # Merge old parent and old working dir copies |
|
1510 | 1510 | oldcopies = copiesmod.pathcopies(newctx, oldctx, match) |
|
1511 | 1511 | oldcopies.update(copies) |
|
1512 | copies = { | |
|
1513 | dst: oldcopies.get(src, src) | |
|
1514 | for dst, src in pycompat.iteritems(oldcopies) | |
|
1515 | } | |
|
1512 | copies = {dst: oldcopies.get(src, src) for dst, src in oldcopies.items()} | |
|
1516 | 1513 | # Adjust the dirstate copies |
|
1517 | for dst, src in pycompat.iteritems(copies): | |
|
1514 | for dst, src in copies.items(): | |
|
1518 | 1515 | if src not in newctx or dst in newctx or not ds.get_entry(dst).added: |
|
1519 | 1516 | src = None |
|
1520 | 1517 | ds.copy(src, dst) |
@@ -9,7 +9,6 b'' | |||
|
9 | 9 | from .i18n import _ |
|
10 | 10 | from . import ( |
|
11 | 11 | mdiff, |
|
12 | pycompat, | |
|
13 | 12 | ) |
|
14 | 13 | |
|
15 | 14 | |
@@ -97,7 +96,7 b' def _findsimilarmatches(repo, added, rem' | |||
|
97 | 96 | copies[a] = (r, myscore) |
|
98 | 97 | progress.complete() |
|
99 | 98 | |
|
100 | for dest, v in pycompat.iteritems(copies): | |
|
99 | for dest, v in copies.items(): | |
|
101 | 100 | source, bscore = v |
|
102 | 101 | yield source, dest, bscore |
|
103 | 102 |
@@ -554,7 +554,7 b' def refreshwdir(repo, origstatus, origsp' | |||
|
554 | 554 | ) |
|
555 | 555 | |
|
556 | 556 | # Check for files that were only in the dirstate. |
|
557 | for file, state in pycompat.iteritems(dirstate): | |
|
557 | for file, state in dirstate.items(): | |
|
558 | 558 | if not file in files: |
|
559 | 559 | old = origsparsematch(file) |
|
560 | 560 | new = sparsematch(file) |
@@ -472,10 +472,10 b' class sshv1peer(wireprotov1peer.wirepeer' | |||
|
472 | 472 | else: |
|
473 | 473 | wireargs[k] = args[k] |
|
474 | 474 | del args[k] |
|
475 | for k, v in sorted(pycompat.iteritems(wireargs)): | |
|
475 | for k, v in sorted(wireargs.items()): | |
|
476 | 476 | self._pipeo.write(b"%s %d\n" % (k, len(v))) |
|
477 | 477 | if isinstance(v, dict): |
|
478 | for dk, dv in pycompat.iteritems(v): | |
|
478 | for dk, dv in v.items(): | |
|
479 | 479 | self._pipeo.write(b"%s %d\n" % (dk, len(dv))) |
|
480 | 480 | self._pipeo.write(dv) |
|
481 | 481 | else: |
@@ -573,7 +573,7 b' def display_by_method(data, fp):' | |||
|
573 | 573 | |
|
574 | 574 | # compute sums for each function |
|
575 | 575 | functiondata = [] |
|
576 | for fname, sitestats in pycompat.iteritems(grouped): | |
|
576 | for fname, sitestats in grouped.items(): | |
|
577 | 577 | total_cum_sec = 0 |
|
578 | 578 | total_self_sec = 0 |
|
579 | 579 | total_percent = 0 |
@@ -652,7 +652,7 b' def display_about_method(data, fp, funct' | |||
|
652 | 652 | else: |
|
653 | 653 | children[site] = 1 |
|
654 | 654 | |
|
655 | parents = [(parent, count) for parent, count in pycompat.iteritems(parents)] | |
|
655 | parents = [(parent, count) for parent, count in parents.items()] | |
|
656 | 656 | parents.sort(reverse=True, key=lambda x: x[1]) |
|
657 | 657 | for parent, count in parents: |
|
658 | 658 | fp.write( |
@@ -696,7 +696,7 b' def display_about_method(data, fp, funct' | |||
|
696 | 696 | ) |
|
697 | 697 | ) |
|
698 | 698 | |
|
699 | children = [(child, count) for child, count in pycompat.iteritems(children)] | |
|
699 | children = [(child, count) for child, count in children.items()] | |
|
700 | 700 | children.sort(reverse=True, key=lambda x: x[1]) |
|
701 | 701 | for child, count in children: |
|
702 | 702 | fp.write( |
@@ -827,7 +827,7 b' def write_to_flame(data, fp, scriptpath=' | |||
|
827 | 827 | fd, path = pycompat.mkstemp() |
|
828 | 828 | |
|
829 | 829 | with open(path, b"w+") as file: |
|
830 | for line, count in pycompat.iteritems(lines): | |
|
830 | for line, count in lines.items(): | |
|
831 | 831 | file.write(b"%s %d\n" % (line, count)) |
|
832 | 832 | |
|
833 | 833 | if outputfile is None: |
@@ -144,7 +144,7 b' def _buildencodefun():' | |||
|
144 | 144 | cmap[xchr(x)] = e + xchr(x).lower() |
|
145 | 145 | |
|
146 | 146 | dmap = {} |
|
147 | for k, v in pycompat.iteritems(cmap): | |
|
147 | for k, v in cmap.items(): | |
|
148 | 148 | dmap[v] = k |
|
149 | 149 | |
|
150 | 150 | def decode(s): |
@@ -193,7 +193,7 b' def debugstrip(ui, repo, *revs, **opts):' | |||
|
193 | 193 | # a revision we have to only delete the bookmark and not strip |
|
194 | 194 | # anything. revsets cannot detect that case. |
|
195 | 195 | nodetobookmarks = {} |
|
196 | for mark, node in pycompat.iteritems(repomarks): | |
|
196 | for mark, node in repomarks.items(): | |
|
197 | 197 | nodetobookmarks.setdefault(node, []).append(mark) |
|
198 | 198 | for marks in nodetobookmarks.values(): |
|
199 | 199 | if bookmarks.issuperset(marks): |
@@ -1770,7 +1770,7 b' class gitsubrepo(abstractsubrepo):' | |||
|
1770 | 1770 | for b in rev2branch[self._state[1]]: |
|
1771 | 1771 | if b.startswith(b'refs/remotes/origin/'): |
|
1772 | 1772 | return True |
|
1773 | for b, revision in pycompat.iteritems(branch2rev): | |
|
1773 | for b, revision in branch2rev.items(): | |
|
1774 | 1774 | if b.startswith(b'refs/remotes/origin/'): |
|
1775 | 1775 | if self._gitisancestor(self._state[1], revision): |
|
1776 | 1776 | return True |
@@ -190,7 +190,7 b' def submerge(repo, wctx, mctx, actx, ove' | |||
|
190 | 190 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) |
|
191 | 191 | |
|
192 | 192 | promptssrc = filemerge.partextras(labels) |
|
193 | for s, l in sorted(pycompat.iteritems(s1)): | |
|
193 | for s, l in sorted(s1.items()): | |
|
194 | 194 | a = sa.get(s, nullstate) |
|
195 | 195 | ld = l # local state with possible dirty flag for compares |
|
196 | 196 | if wctx.sub(s).dirty(): |
@@ -25,7 +25,6 b' from . import (' | |||
|
25 | 25 | encoding, |
|
26 | 26 | error, |
|
27 | 27 | match as matchmod, |
|
28 | pycompat, | |
|
29 | 28 | scmutil, |
|
30 | 29 | util, |
|
31 | 30 | ) |
@@ -354,7 +353,7 b' def _updatetags(filetags, alltags, tagty' | |||
|
354 | 353 | if tagtype is None: |
|
355 | 354 | assert tagtypes is None |
|
356 | 355 | |
|
357 | for name, nodehist in pycompat.iteritems(filetags): | |
|
356 | for name, nodehist in filetags.items(): | |
|
358 | 357 | if name not in alltags: |
|
359 | 358 | alltags[name] = nodehist |
|
360 | 359 | if tagtype is not None: |
@@ -507,7 +506,7 b' def _getfnodes(ui, repo, nodes):' | |||
|
507 | 506 | |
|
508 | 507 | if unknown_entries: |
|
509 | 508 | fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) |
|
510 | for node, fnode in pycompat.iteritems(fixed_nodemap): | |
|
509 | for node, fnode in fixed_nodemap.items(): | |
|
511 | 510 | if fnode != repo.nullid: |
|
512 | 511 | cachefnode[node] = fnode |
|
513 | 512 | |
@@ -549,7 +548,7 b' def _writetagcache(ui, repo, valid, cach' | |||
|
549 | 548 | # we keep them in UTF-8 throughout this module. If we converted |
|
550 | 549 | # them local encoding on input, we would lose info writing them to |
|
551 | 550 | # the cache. |
|
552 | for (name, (node, hist)) in sorted(pycompat.iteritems(cachetags)): | |
|
551 | for (name, (node, hist)) in sorted(cachetags.items()): | |
|
553 | 552 | for n in hist: |
|
554 | 553 | cachefile.write(b"%s %s\n" % (hex(n), name)) |
|
555 | 554 | cachefile.write(b"%s %s\n" % (hex(node), name)) |
@@ -346,7 +346,7 b' def json(obj, paranoid=True):' | |||
|
346 | 346 | out = [ |
|
347 | 347 | b'"%s": %s' |
|
348 | 348 | % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid)) |
|
349 | for k, v in sorted(pycompat.iteritems(obj)) | |
|
349 | for k, v in sorted(obj.items()) | |
|
350 | 350 | ] |
|
351 | 351 | return b'{' + b', '.join(out) + b'}' |
|
352 | 352 | elif util.safehasattr(obj, b'__iter__'): |
@@ -548,7 +548,7 b' def websub(text, websubtable):' | |||
|
548 | 548 | |
|
549 | 549 | def loadfilter(ui, extname, registrarobj): |
|
550 | 550 | """Load template filter from specified registrarobj""" |
|
551 | for name, func in pycompat.iteritems(registrarobj._table): | |
|
551 | for name, func in registrarobj._table.items(): | |
|
552 | 552 | filters[name] = func |
|
553 | 553 | |
|
554 | 554 |
@@ -910,7 +910,7 b' def word(context, mapping, args):' | |||
|
910 | 910 | |
|
911 | 911 | def loadfunction(ui, extname, registrarobj): |
|
912 | 912 | """Load template function from specified registrarobj""" |
|
913 | for name, func in pycompat.iteritems(registrarobj._table): | |
|
913 | for name, func in registrarobj._table.items(): | |
|
914 | 914 | funcs[name] = func |
|
915 | 915 | |
|
916 | 916 |
@@ -602,7 +602,7 b' def shownamespaces(context, mapping):' | |||
|
602 | 602 | # 'name' for iterating over namespaces, templatename for local reference |
|
603 | 603 | return lambda v: {b'name': v, ns.templatename: v} |
|
604 | 604 | |
|
605 | for k, ns in pycompat.iteritems(repo.names): | |
|
605 | for k, ns in repo.names.items(): | |
|
606 | 606 | names = ns.names(repo, ctx.node()) |
|
607 | 607 | f = _showcompatlist(context, mapping, b'name', names) |
|
608 | 608 | namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity) |
@@ -690,7 +690,7 b' def showpeerurls(context, mapping):' | |||
|
690 | 690 | d.update(sub_opts) |
|
691 | 691 | path_dict = util.sortdict() |
|
692 | 692 | for p in ps: |
|
693 | sub_opts = util.sortdict(sorted(pycompat.iteritems(p.suboptions))) | |
|
693 | sub_opts = util.sortdict(sorted(p.suboptions.items())) | |
|
694 | 694 | path_dict[b'url'] = p.rawloc |
|
695 | 695 | path_dict.update(sub_opts) |
|
696 | 696 | d[b'urls'] = [path_dict] |
@@ -1023,7 +1023,7 b' def showwhyunstable(context, mapping):' | |||
|
1023 | 1023 | |
|
1024 | 1024 | def loadkeyword(ui, extname, registrarobj): |
|
1025 | 1025 | """Load template keyword from specified registrarobj""" |
|
1026 | for name, func in pycompat.iteritems(registrarobj._table): | |
|
1026 | for name, func in registrarobj._table.items(): | |
|
1027 | 1027 | keywords[name] = func |
|
1028 | 1028 | |
|
1029 | 1029 |
@@ -530,8 +530,7 b' def _buildfuncargs(exp, context, curmeth' | |||
|
530 | 530 | |
|
531 | 531 | def compiledict(xs): |
|
532 | 532 | return util.sortdict( |
|
533 | (k, compileexp(x, context, curmethods)) | |
|
534 | for k, x in pycompat.iteritems(xs) | |
|
533 | (k, compileexp(x, context, curmethods)) for k, x in xs.items() | |
|
535 | 534 | ) |
|
536 | 535 | |
|
537 | 536 | def compilelist(xs): |
@@ -708,7 +707,7 b' class engine(object):' | |||
|
708 | 707 | newres = self._resources.availablekeys(newmapping) |
|
709 | 708 | mapping = { |
|
710 | 709 | k: v |
|
711 | for k, v in pycompat.iteritems(origmapping) | |
|
710 | for k, v in origmapping.items() | |
|
712 | 711 | if ( |
|
713 | 712 | k in knownres # not a symbol per self.symbol() |
|
714 | 713 | or newres.isdisjoint(self._defaultrequires(k)) |
@@ -310,7 +310,7 b' class hybrid(wrapped):' | |||
|
310 | 310 | if util.safehasattr(self._values, b'get'): |
|
311 | 311 | values = { |
|
312 | 312 | k: v |
|
313 | for k, v in pycompat.iteritems(self._values) | |
|
313 | for k, v in self._values.items() | |
|
314 | 314 | if select(self._wrapvalue(k, v)) |
|
315 | 315 | } |
|
316 | 316 | else: |
@@ -342,10 +342,7 b' class hybrid(wrapped):' | |||
|
342 | 342 | # TODO: make it non-recursive for trivial lists/dicts |
|
343 | 343 | xs = self._values |
|
344 | 344 | if util.safehasattr(xs, b'get'): |
|
345 | return { | |
|
346 | k: unwrapvalue(context, mapping, v) | |
|
347 | for k, v in pycompat.iteritems(xs) | |
|
348 | } | |
|
345 | return {k: unwrapvalue(context, mapping, v) for k, v in xs.items()} | |
|
349 | 346 | return [unwrapvalue(context, mapping, x) for x in xs] |
|
350 | 347 | |
|
351 | 348 | |
@@ -537,7 +534,7 b' class _mappingsequence(wrapped):' | |||
|
537 | 534 | items.append( |
|
538 | 535 | { |
|
539 | 536 | k: unwrapvalue(context, lm, v) |
|
540 | for k, v in pycompat.iteritems(nm) | |
|
537 | for k, v in nm.items() | |
|
541 | 538 | if k not in knownres |
|
542 | 539 | } |
|
543 | 540 | ) |
@@ -715,7 +712,7 b' def compatdict(' | |||
|
715 | 712 | This exists for backward compatibility with the old-style template. Use |
|
716 | 713 | hybriddict() for new template keywords. |
|
717 | 714 | """ |
|
718 | c = [{key: k, value: v} for k, v in pycompat.iteritems(data)] | |
|
715 | c = [{key: k, value: v} for k, v in data.items()] | |
|
719 | 716 | f = _showcompatlist(context, mapping, name, c, plural, separator) |
|
720 | 717 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) |
|
721 | 718 |
@@ -382,7 +382,7 b' class transaction(util.transactional):' | |||
|
382 | 382 | skip_pre = group == GEN_GROUP_POST_FINALIZE |
|
383 | 383 | skip_post = group == GEN_GROUP_PRE_FINALIZE |
|
384 | 384 | |
|
385 | for id, entry in sorted(pycompat.iteritems(self._filegenerators)): | |
|
385 | for id, entry in sorted(self._filegenerators.items()): | |
|
386 | 386 | any = True |
|
387 | 387 | order, filenames, genfunc, location, post_finalize = entry |
|
388 | 388 |
@@ -240,7 +240,7 b' def _generic_proxytunnel(self):' | |||
|
240 | 240 | if x.lower().startswith('proxy-') |
|
241 | 241 | } |
|
242 | 242 | self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport) |
|
243 | for header in pycompat.iteritems(proxyheaders): | |
|
243 | for header in proxyheaders.items(): | |
|
244 | 244 | self.send(b'%s: %s\r\n' % header) |
|
245 | 245 | self.send(b'\r\n') |
|
246 | 246 |
@@ -1301,7 +1301,7 b' class sortdict(collections.OrderedDict):' | |||
|
1301 | 1301 | # __setitem__() isn't called as of PyPy 5.8.0 |
|
1302 | 1302 | def update(self, src, **f): |
|
1303 | 1303 | if isinstance(src, dict): |
|
1304 | src = pycompat.iteritems(src) | |
|
1304 | src = src.items() | |
|
1305 | 1305 | for k, v in src: |
|
1306 | 1306 | self[k] = v |
|
1307 | 1307 | for k in f: |
@@ -174,9 +174,7 b' def streamencodemap(d):' | |||
|
174 | 174 | """ |
|
175 | 175 | yield encodelength(MAJOR_TYPE_MAP, len(d)) |
|
176 | 176 | |
|
177 | for key, value in sorted( | |
|
178 | pycompat.iteritems(d), key=lambda x: _mixedtypesortkey(x[0]) | |
|
179 | ): | |
|
177 | for key, value in sorted(d.items(), key=lambda x: _mixedtypesortkey(x[0])): | |
|
180 | 178 | for chunk in streamencode(key): |
|
181 | 179 | yield chunk |
|
182 | 180 | for chunk in streamencode(value): |
@@ -342,7 +342,7 b' def tempfilter(s, cmd):' | |||
|
342 | 342 | def filter(s, cmd): |
|
343 | 343 | """filter a string through a command that transforms its input to its |
|
344 | 344 | output""" |
|
345 | for name, fn in pycompat.iteritems(_filtertable): | |
|
345 | for name, fn in _filtertable.items(): | |
|
346 | 346 | if cmd.startswith(name): |
|
347 | 347 | return fn(s, cmd[len(name) :].lstrip()) |
|
348 | 348 | return pipefilter(s, cmd) |
@@ -448,7 +448,7 b' def shellenviron(environ=None):' | |||
|
448 | 448 | |
|
449 | 449 | env = dict(encoding.environ) |
|
450 | 450 | if environ: |
|
451 | env.update((k, py2shell(v)) for k, v in pycompat.iteritems(environ)) | |
|
451 | env.update((k, py2shell(v)) for k, v in environ.items()) | |
|
452 | 452 | env[b'HG'] = hgexecutable() |
|
453 | 453 | return env |
|
454 | 454 |
@@ -453,7 +453,7 b' def list_paths(ui, target_path=None):' | |||
|
453 | 453 | """list all the (name, paths) in the passed ui""" |
|
454 | 454 | result = [] |
|
455 | 455 | if target_path is None: |
|
456 | for name, paths in sorted(pycompat.iteritems(ui.paths)): | |
|
456 | for name, paths in sorted(ui.paths.items()): | |
|
457 | 457 | for p in paths: |
|
458 | 458 | result.append((name, p)) |
|
459 | 459 | |
@@ -919,7 +919,7 b' class path(object):' | |||
|
919 | 919 | # Now process the sub-options. If a sub-option is registered, its |
|
920 | 920 | # attribute will always be present. The value will be None if there |
|
921 | 921 | # was no valid sub-option. |
|
922 | for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): | |
|
922 | for suboption, (attr, func) in _pathsuboptions.items(): | |
|
923 | 923 | if suboption not in sub_options: |
|
924 | 924 | setattr(self, attr, None) |
|
925 | 925 | continue |
@@ -945,7 +945,7 b' class path(object):' | |||
|
945 | 945 | This is intended to be used for presentation purposes. |
|
946 | 946 | """ |
|
947 | 947 | d = {} |
|
948 | for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions): | |
|
948 | for subopt, (attr, _func) in _pathsuboptions.items(): | |
|
949 | 949 | value = getattr(self, attr) |
|
950 | 950 | if value is not None: |
|
951 | 951 | d[subopt] = value |
@@ -405,11 +405,11 b' class verifier(object):' | |||
|
405 | 405 | _(b'checking'), unit=_(b'manifests'), total=len(subdirs) |
|
406 | 406 | ) |
|
407 | 407 | |
|
408 | for subdir, linkrevs in pycompat.iteritems(subdirnodes): | |
|
408 | for subdir, linkrevs in subdirnodes.items(): | |
|
409 | 409 | subdirfilenodes = self._verifymanifest( |
|
410 | 410 | linkrevs, subdir, storefiles, subdirprogress |
|
411 | 411 | ) |
|
412 | for f, onefilenodes in pycompat.iteritems(subdirfilenodes): | |
|
412 | for f, onefilenodes in subdirfilenodes.items(): | |
|
413 | 413 | filenodes.setdefault(f, {}).update(onefilenodes) |
|
414 | 414 | |
|
415 | 415 | if not dir and subdirnodes: |
@@ -122,7 +122,7 b" ARGUMENT_RECORD_HEADER = struct.Struct('" | |||
|
122 | 122 | |
|
123 | 123 | def humanflags(mapping, value): |
|
124 | 124 | """Convert a numeric flags value to a human value, using a mapping table.""" |
|
125 | namemap = {v: k for k, v in pycompat.iteritems(mapping)} | |
|
125 | namemap = {v: k for k, v in mapping.items()} | |
|
126 | 126 | flags = [] |
|
127 | 127 | val = 1 |
|
128 | 128 | while value >= val: |
@@ -159,7 +159,7 b' class frame(object):' | |||
|
159 | 159 | @encoding.strmethod |
|
160 | 160 | def __repr__(self): |
|
161 | 161 | typename = b'<unknown 0x%02x>' % self.typeid |
|
162 | for name, value in pycompat.iteritems(FRAME_TYPES): | |
|
162 | for name, value in FRAME_TYPES.items(): | |
|
163 | 163 | if value == self.typeid: |
|
164 | 164 | typename = name |
|
165 | 165 | break |
@@ -80,8 +80,7 b' def encodebatchcmds(req):' | |||
|
80 | 80 | assert all(escapearg(k) == k for k in argsdict) |
|
81 | 81 | |
|
82 | 82 | args = b','.join( |
|
83 | b'%s=%s' % (escapearg(k), escapearg(v)) | |
|
84 | for k, v in pycompat.iteritems(argsdict) | |
|
83 | b'%s=%s' % (escapearg(k), escapearg(v)) for k, v in argsdict.items() | |
|
85 | 84 | ) |
|
86 | 85 | cmds.append(b'%s %s' % (op, args)) |
|
87 | 86 | |
@@ -438,7 +437,7 b' class wirepeer(repository.peer):' | |||
|
438 | 437 | self.requirecap(b'getbundle', _(b'look up remote changes')) |
|
439 | 438 | opts = {} |
|
440 | 439 | bundlecaps = kwargs.get(b'bundlecaps') or set() |
|
441 | for key, value in pycompat.iteritems(kwargs): | |
|
440 | for key, value in kwargs.items(): | |
|
442 | 441 | if value is None: |
|
443 | 442 | continue |
|
444 | 443 | keytype = wireprototypes.GETBUNDLE_ARGUMENTS.get(key) |
@@ -235,7 +235,7 b' def between(repo, proto, pairs):' | |||
|
235 | 235 | def branchmap(repo, proto): |
|
236 | 236 | branchmap = repo.branchmap() |
|
237 | 237 | heads = [] |
|
238 | for branch, nodes in pycompat.iteritems(branchmap): | |
|
238 | for branch, nodes in branchmap.items(): | |
|
239 | 239 | branchname = urlreq.quote(encoding.fromlocal(branch)) |
|
240 | 240 | branchnodes = wireprototypes.encodelist(nodes) |
|
241 | 241 | heads.append(b'%s %s' % (branchname, branchnodes)) |
@@ -432,7 +432,7 b' def getbundle(repo, proto, others):' | |||
|
432 | 432 | opts = options( |
|
433 | 433 | b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others |
|
434 | 434 | ) |
|
435 | for k, v in pycompat.iteritems(opts): | |
|
435 | for k, v in opts.items(): | |
|
436 | 436 | keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k] |
|
437 | 437 | if keytype == b'nodes': |
|
438 | 438 | opts[k] = wireprototypes.decodelist(v) |
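
The hunks above repeat a single mechanical cleanup across the tree: with Python 2 support dropped, each call to the compatibility helper pycompat.iteritems(d) becomes a plain d.items(), and modules whose only remaining use of pycompat was this helper drop the import as well. A minimal sketch of the idiom being retired (illustrative only: this is an assumed shape for the shim, not the exact pycompat source):

import sys

if sys.version_info[0] >= 3:

    def iteritems(d):
        # Python 3: items() already returns a lazy view object
        return iter(d.items())

else:

    def iteritems(d):
        # Python 2: iteritems() avoided building an intermediate list
        return d.iteritems()

d = {b'key': 1, b'other': 2}
assert sorted(iteritems(d)) == sorted(d.items())  # both spellings agree

Because dict.items() is a lazy view on Python 3, the rewrite keeps the iteration behavior the shim existed to provide; callers that mutate the dict while iterating still need an explicit list(...), as the list(sys.modules.items()) hunk above shows.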