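Every hunk in this changeset applies the same mechanical cleanup: calls to the Python 2/3 compatibility shim pycompat.iteritems(d) become plain d.items(), and pycompat imports that thereby become unused are dropped. A minimal sketch of the pattern, using a hypothetical dict (the names below are illustrative, not taken from the patch):

    d = {b'spam': 1, b'eggs': 2}

    # Before: the shim picked d.iteritems() on Python 2 and d.items() on
    # Python 3, so neither version built an intermediate list.
    #     for k, v in pycompat.iteritems(d):
    #         ...

    # After: with Python 2 support dropped, dict.items() already returns a
    # cheap view object, so the indirection no longer buys anything.
    for k, v in d.items():
        print(k, v)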
@@ -733,7 +733,7 @@ class fixupstate(object):
 
     def apply(self):
         """apply fixups to individual filefixupstates"""
-        for path, state in pycompat.iteritems(self.fixupmap):
+        for path, state in self.fixupmap.items():
             if self.ui.debugflag:
                 self.ui.write(_(b'applying fixups to %s\n') % path)
             state.apply()
@@ -741,10 +741,7 @@ class fixupstate(object):
     @property
     def chunkstats(self):
         """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
-        return {
-            path: state.chunkstats
-            for path, state in pycompat.iteritems(self.fixupmap)
-        }
+        return {path: state.chunkstats for path, state in self.fixupmap.items()}
 
     def commit(self):
         """commit changes. update self.finalnode, self.replacemap"""
@@ -762,7 +759,7 @@ class fixupstate(object):
         chunkstats = self.chunkstats
         if ui.verbose:
             # chunkstats for each file
-            for path, stat in pycompat.iteritems(chunkstats):
+            for path, stat in chunkstats.items():
                 if stat[0]:
                     ui.write(
                         _(b'%s: %d of %d chunk(s) applied\n')
@@ -845,7 +842,7 @@ class fixupstate(object):
         repo = self.repo
         needupdate = [
             (name, self.replacemap[hsh])
-            for name, hsh in pycompat.iteritems(repo._bookmarks)
+            for name, hsh in repo._bookmarks.items()
             if hsh in self.replacemap
         ]
         changes = []
@@ -908,7 +905,7 @@ class fixupstate(object):
         # ctx changes more files (not a subset of memworkingcopy)
         if not set(ctx.files()).issubset(set(memworkingcopy)):
             return False
-        for path, content in pycompat.iteritems(memworkingcopy):
+        for path, content in memworkingcopy.items():
             if path not in pctx or path not in ctx:
                 return False
             fctx = ctx[path]
@@ -951,7 +948,7 @@ class fixupstate(object):
     def _cleanupoldcommits(self):
         replacements = {
             k: ([v] if v is not None else [])
-            for k, v in pycompat.iteritems(self.replacemap)
+            for k, v in self.replacemap.items()
         }
         if replacements:
             scmutil.cleanupnodes(
@@ -1001,7 +998,7 @@ def overlaydiffcontext(ctx, chunks):
         if not path or not info:
             continue
         patchmap[path].append(info)
-    for path, patches in pycompat.iteritems(patchmap):
+    for path, patches in patchmap.items():
         if path not in ctx or not patches:
             continue
         patches.sort(reverse=True)
@@ -15,7 +15,6 @@ from mercurial.i18n import _
 from mercurial import (
     demandimport,
     error,
-    pycompat,
     util,
 )
 from . import common
@@ -209,7 +208,7 @@ class bzr_source(common.converter_source
         if not branch.supports_tags():
             return {}
         tagdict = branch.tags.get_tag_dict()
-        for name, rev in pycompat.iteritems(tagdict):
+        for name, rev in tagdict.items():
             bytetags[self.recode(name)] = rev
         return bytetags
 
@@ -417,7 +417,7 @@ class commandline(object):
     def _cmdline(self, cmd, *args, **kwargs):
         kwargs = pycompat.byteskwargs(kwargs)
         cmdline = [self.command, cmd] + list(args)
-        for k, v in pycompat.iteritems(kwargs):
+        for k, v in kwargs.items():
             if len(k) == 1:
                 cmdline.append(b'-' + k)
             else:
@@ -584,9 +584,7 @@ class converter(object):
                 # write another hash correspondence to override the
                 # previous one so we don't end up with extra tag heads
                 tagsparents = [
-                    e
-                    for e in pycompat.iteritems(self.map)
-                    if e[1] == tagsparent
+                    e for e in self.map.items() if e[1] == tagsparent
                 ]
                 if tagsparents:
                     self.map[tagsparents[0][0]] = nrev
@@ -466,7 +466,7 @@ def createlog(ui, directory=None, root=b
 
         # find the branches starting from this revision
         branchpoints = set()
-        for branch, revision in pycompat.iteritems(branchmap):
+        for branch, revision in branchmap.items():
             revparts = tuple([int(i) for i in revision.split(b'.')])
             if len(revparts) < 2:  # bad tags
                 continue
@@ -125,7 +125,7 @@ class filemapper(object):
         repo belong to the source repo and what parts don't."""
         if self.targetprefixes is None:
             self.targetprefixes = set()
-            for before, after in pycompat.iteritems(self.rename):
+            for before, after in self.rename.items():
                 self.targetprefixes.add(after)
 
         # If "." is a target, then all target files are considered from the
@@ -138,7 +138,7 @@ class mercurial_sink(common.converter_si
 
         if missings:
             self.after()
-            for pbranch, heads in sorted(pycompat.iteritems(missings)):
+            for pbranch, heads in sorted(missings.items()):
                 pbranchpath = os.path.join(self.path, pbranch)
                 prepo = hg.peer(self.ui, {}, pbranchpath)
                 self.ui.note(
@@ -595,7 +595,7 @@ class mercurial_source(common.converter_
         maappend = ma.append
         rappend = r.append
         d = ctx1.manifest().diff(ctx2.manifest())
-        for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
+        for f, ((node1, flag1), (node2, flag2)) in d.items():
             if node2 is None:
                 rappend(f)
             else:
@@ -621,7 +621,7 @@ class mercurial_source(common.converter_
         cleanp2 = set()
         if len(parents) == 2:
             d = parents[1].manifest().diff(ctx.manifest(), clean=True)
-            for f, value in pycompat.iteritems(d):
+            for f, value in d.items():
                 if value is None:
                     cleanp2.add(f)
         changes = [(f, rev) for f in files if f not in self.ignored]
@@ -102,7 +102,7 @@ class monotone_source(common.converter_s
         # Prepare the command in automate stdio format
         kwargs = pycompat.byteskwargs(kwargs)
         command = []
-        for k, v in pycompat.iteritems(kwargs):
+        for k, v in kwargs.items():
             command.append(b"%d:%s" % (len(k), k))
             if v:
                 command.append(b"%d:%s" % (len(v), v))
@@ -202,7 +202,7 @@ def get_log_child(
     def receiver(orig_paths, revnum, author, date, message, pool):
         paths = {}
         if orig_paths is not None:
-            for k, v in pycompat.iteritems(orig_paths):
+            for k, v in orig_paths.items():
                 paths[k] = changedpath(v)
         pickle.dump((paths, revnum, author, date, message), fp, protocol)
 
@@ -297,7 +297,7 @@ class directlogstream(list):
         def receiver(orig_paths, revnum, author, date, message, pool):
             paths = {}
             if orig_paths is not None:
-                for k, v in pycompat.iteritems(orig_paths):
+                for k, v in orig_paths.items():
                     paths[k] = changedpath(v)
             self.append((paths, revnum, author, date, message))
 
@@ -729,7 +729,7 @@ class svn_source(converter_source):
             )
             files = [
                 n
-                for n, e in pycompat.iteritems(entries)
+                for n, e in entries.items()
                 if e.kind == svn.core.svn_node_file
             ]
             self.removed = set()
@@ -819,7 +819,7 @@ class svn_source(converter_source):
             origpaths = []
         copies = [
             (e.copyfrom_path, e.copyfrom_rev, p)
-            for p, e in pycompat.iteritems(origpaths)
+            for p, e in origpaths.items()
            if e.copyfrom_path
         ]
         # Apply moves/copies from more specific to general
@@ -850,7 +850,7 @@ class svn_source(converter_source):
         # be represented in mercurial.
         addeds = {
             p: e.copyfrom_path
-            for p, e in pycompat.iteritems(origpaths)
+            for p, e in origpaths.items()
             if e.action == b'A' and e.copyfrom_path
         }
         badroots = set()
@@ -1139,7 +1139,7 @@ class svn_source(converter_source):
             parents = []
             # check whether this revision is the start of a branch or part
             # of a branch renaming
-            orig_paths = sorted(pycompat.iteritems(orig_paths))
+            orig_paths = sorted(orig_paths.items())
             root_paths = [
                 (p, e) for p, e in orig_paths if self.module.startswith(p)
             ]
@@ -1301,7 +1301,7 @@ class svn_source(converter_source):
             path += b'/'
         return (
             (path + p)
-            for p, e in pycompat.iteritems(entries)
+            for p, e in entries.items()
             if e.kind == svn.core.svn_node_file
         )
 
@@ -378,7 +378,7 @@ def reposetup(ui, repo):
 
     if not repo.local():
         return
-    for name, fn in pycompat.iteritems(filters):
+    for name, fn in filters.items():
         repo.adddatafilter(name, fn)
 
     ui.setconfig(b'patch', b'eol', b'auto', b'eol')
@@ -174,7 +174,7 @@ class annotateopts(object):
 
     def __init__(self, **opts):
         opts = pycompat.byteskwargs(opts)
-        for k, v in pycompat.iteritems(self.defaults):
+        for k, v in self.defaults.items():
             setattr(self, k, opts.get(k, v))
 
     @util.propertycache
@@ -583,7 +583,7 @@ class _annotatecontext(object):
             # find an unresolved line and its linelog rev to annotate
             hsh = None
             try:
-                for (rev, _linenum), idxs in pycompat.iteritems(key2idxs):
+                for (rev, _linenum), idxs in key2idxs.items():
                     if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
                         continue
                     hsh = annotateresult[idxs[0]][0]
@@ -594,7 +594,7 @@ class _annotatecontext(object):
             # the remaining key2idxs are not in main branch, resolving them
             # using the hard way...
             revlines = {}
-            for (rev, linenum), idxs in pycompat.iteritems(key2idxs):
+            for (rev, linenum), idxs in key2idxs.items():
                 if rev not in revlines:
                     hsh = annotateresult[idxs[0]][0]
                     if self.ui.debugflag:
@@ -14,7 +14,6 @@ from mercurial import (
     error,
     extensions,
     hg,
-    pycompat,
     util,
     wireprotov1peer,
     wireprotov1server,
@@ -189,7 +188,7 @@ def clientfetch(repo, paths, lastnodemap
         for result in results:
             r = result.result()
             # TODO: pconvert these paths on the server?
-            r = {util.pconvert(p): v for p, v in pycompat.iteritems(r)}
+            r = {util.pconvert(p): v for p, v in r.items()}
             for path in sorted(r):
                 # ignore malicious paths
                 if not path.startswith(b'fastannotate/') or b'/../' in (
@@ -377,9 +377,7 @@ def cleanup(repo, replacements, wdirwrit
     Useful as a hook point for extending "hg fix" with output summarizing the
     effects of the command, though we choose not to output anything here.
     """
-    replacements = {
-        prec: [succ] for prec, succ in pycompat.iteritems(replacements)
-    }
+    replacements = {prec: [succ] for prec, succ in replacements.items()}
     scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
 
 
@@ -692,7 +690,7 @@ def fixfile(ui, repo, opts, fixers, fixc
     """
     metadata = {}
     newdata = fixctx[path].data()
-    for fixername, fixer in pycompat.iteritems(fixers):
+    for fixername, fixer in fixers.items():
         if fixer.affects(opts, fixctx, path):
             ranges = lineranges(
                 opts, path, basepaths, basectxs, fixctx, newdata
@@ -770,7 +768,7 @@ def writeworkingdir(repo, ctx, filedata,
 
     Directly updates the dirstate for the affected files.
     """
-    for path, data in pycompat.iteritems(filedata):
+    for path, data in filedata.items():
         fctx = ctx[path]
         fctx.write(data, fctx.flags())
 
@@ -501,15 +501,11 @@ def overridewalk(orig, self, match, subr
         visit.update(f for f in copymap if f not in results and matchfn(f))
     else:
         if matchalways:
-            visit.update(
-                f for f, st in pycompat.iteritems(dmap) if f not in results
-            )
+            visit.update(f for f, st in dmap.items() if f not in results)
             visit.update(f for f in copymap if f not in results)
         else:
             visit.update(
-                f
-                for f, st in pycompat.iteritems(dmap)
-                if f not in results and matchfn(f)
+                f for f, st in dmap.items() if f not in results and matchfn(f)
             )
             visit.update(f for f in copymap if f not in results and matchfn(f))
 
@@ -115,7 +115,7 @@ def parseoptions(ui, cmdoptions, args):
     opts = dict(
         [
             (k, convert(v)) if isinstance(v, bytes) else (k, v)
-            for k, v in pycompat.iteritems(opts)
+            for k, v in opts.items()
         ]
     )
 
@@ -131,7 +131,7 @@ class Command(object):
     def __bytes__(self):
         cmd = b"hg " + self.name
         if self.opts:
-            for k, values in sorted(pycompat.iteritems(self.opts)):
+            for k, values in sorted(self.opts.items()):
                 for v in values:
                     if v:
                         if isinstance(v, int):
@@ -376,9 +376,7 @@ def view(ui, repo, *etc, **opts):
     """start interactive history viewer"""
     opts = pycompat.byteskwargs(opts)
     os.chdir(repo.root)
-    optstr = b' '.join(
-        [b'--%s %s' % (k, v) for k, v in pycompat.iteritems(opts) if v]
-    )
+    optstr = b' '.join([b'--%s %s' % (k, v) for k, v in opts.items() if v])
     if repo.filtername is None:
         optstr += b'--hidden'
 
@@ -2101,7 +2101,7 @@ def _finishhistedit(ui, repo, state, fm)
 
     mapping, tmpnodes, created, ntm = processreplacement(state)
     if mapping:
-        for prec, succs in pycompat.iteritems(mapping):
+        for prec, succs in mapping.items():
             if not succs:
                 ui.debug(b'histedit: %s is dropped\n' % short(prec))
             else:
@@ -2139,7 +2139,7 @@ def _finishhistedit(ui, repo, state, fm)
         nodechanges = fd(
             {
                 hf(oldn): fl([hf(n) for n in newn], name=b'node')
-                for oldn, newn in pycompat.iteritems(mapping)
+                for oldn, newn in mapping.items()
             },
             key=b"oldnode",
             value=b"newnodes",
@@ -2387,7 +2387,7 @@ def ruleeditor(repo, ui, actions, editco
                 tsum = summary[len(fword) + 1 :].lstrip()
                 # safe but slow: reverse iterate over the actions so we
                 # don't clash on two commits having the same summary
-                for na, l in reversed(list(pycompat.iteritems(newact))):
+                for na, l in reversed(list(newact.items())):
                     actx = repo[na.node]
                     asum = _getsummary(actx)
                     if asum == tsum:
@@ -2400,7 +2400,7 @@ def ruleeditor(repo, ui, actions, editco
 
     # copy over and flatten the new list
     actions = []
-    for na, l in pycompat.iteritems(newact):
+    for na, l in newact.items():
         actions.append(na)
         actions += l
 
@@ -419,7 +419,7 @@ def localrepolistkeys(orig, self, namesp
         if pattern.endswith(b'*'):
             pattern = b're:^' + pattern[:-1] + b'.*'
         kind, pat, matcher = stringutil.stringmatcher(pattern)
-        for bookmark, node in pycompat.iteritems(bookmarks):
+        for bookmark, node in bookmarks.items():
             if matcher(bookmark):
                 results[bookmark] = node
         return results
@@ -542,7 +542,7 @@ def _generateoutputparts(head, bundlerep
             if part.type == b'changegroup':
                 haschangegroup = True
             newpart = bundle2.bundlepart(part.type, data=part.read())
-            for key, value in pycompat.iteritems(part.params):
+            for key, value in part.params.items():
                 newpart.addparam(key, value)
             parts.append(newpart)
 
@@ -794,7 +794,7 @@ def _saveremotebookmarks(repo, newbookma
             # saveremotenames expects 20 byte binary nodes for branches
             branches[rname].append(bin(hexnode))
 
-    for bookmark, hexnode in pycompat.iteritems(newbookmarks):
+    for bookmark, hexnode in newbookmarks.items():
         bookmarks[bookmark] = hexnode
     remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
 
@@ -804,7 +804,7 @@ def _savelocalbookmarks(repo, bookmarks)
         return
     with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
         changes = []
-        for scratchbook, node in pycompat.iteritems(bookmarks):
+        for scratchbook, node in bookmarks.items():
             changectx = repo[node]
             changes.append((scratchbook, changectx.node()))
         repo._bookmarks.applychanges(repo, tr, changes)
@@ -1045,7 +1045,7 @@ def storetobundlestore(orig, repo, op, u
                 bundle2._processpart(op, part)
             else:
                 bundlepart = bundle2.bundlepart(part.type, data=part.read())
-                for key, value in pycompat.iteritems(part.params):
+                for key, value in part.params.items():
                     bundlepart.addparam(key, value)
 
                 # Certain parts require a response
@@ -1137,7 +1137,7 @@ def processparts(orig, repo, op, unbundl
                 # differs from previous behavior, we need to put it behind a
                 # config flag for incremental rollout.
                 bundlepart = bundle2.bundlepart(part.type, data=part.read())
-                for key, value in pycompat.iteritems(part.params):
+                for key, value in part.params.items():
                     bundlepart.addparam(key, value)
 
                 # Certain parts require a response
@@ -1323,9 +1323,7 @@ def _maybeaddpushbackpart(op, bookmark, 
             b'new': newnode,
             b'old': oldnode,
         }
-        op.reply.newpart(
-            b'pushkey', mandatoryparams=pycompat.iteritems(params)
-        )
+        op.reply.newpart(b'pushkey', mandatoryparams=params.items())
 
 
 def bundle2pushkey(orig, op, part):
@@ -12,7 +12,6 @@ from mercurial import (
     changegroup,
     error,
     extensions,
-    pycompat,
     revsetlang,
     util,
 )
@@ -67,7 +66,7 @@ def getscratchbranchparts(repo, peer, ou
     parts.append(
         bundle2.bundlepart(
             scratchbranchparttype.upper(),
-            advisoryparams=pycompat.iteritems(params),
+            advisoryparams=params.items(),
             data=cg,
         )
     )
@@ -13,8 +13,6 @@ import time
 import warnings
 import mysql.connector
 
-from mercurial import pycompat
-
 from . import indexapi
 
 
@@ -179,7 +177,7 @@ class sqlindexapi(indexapi.indexapi):
             self.sqlconnect()
         args = []
         values = []
-        for bookmark, node in pycompat.iteritems(bookmarks):
+        for bookmark, node in bookmarks.items():
             args.append(b'(%s, %s, %s)')
             values.extend((bookmark, node, self.reponame))
         args = b','.join(args)
@@ -128,7 +128,7 @@ def recordbookmarks(orig, store, fp):
     repo = store._repo
     if util.safehasattr(repo, 'journal'):
         oldmarks = bookmarks.bmstore(repo)
-        for mark, value in pycompat.iteritems(store):
+        for mark, value in store.items():
             oldvalue = oldmarks.get(mark, repo.nullid)
             if value != oldvalue:
                 repo.journal.record(bookmarktype, mark, oldvalue, value)
@@ -513,7 +513,7 @@ def demo(ui, repo, *args, **opts):
     kwmaps = _defaultkwmaps(ui)
     if uikwmaps:
         ui.status(_(b'\tdisabling current template maps\n'))
-        for k, v in pycompat.iteritems(kwmaps):
+        for k, v in kwmaps.items():
             ui.setconfig(b'keywordmaps', k, v, b'keyword')
     else:
         ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
@@ -527,7 +527,7 @@ def demo(ui, repo, *args, **opts):
     ui.writenoi18n(b'[extensions]\nkeyword =\n')
     demoitems(b'keyword', ui.configitems(b'keyword'))
     demoitems(b'keywordset', ui.configitems(b'keywordset'))
-    demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
+    demoitems(b'keywordmaps', kwmaps.items())
     keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
     repo.wvfs.write(fn, keywords)
     repo[None].add([fn])
@@ -713,7 +713,7 @@ def copiespathcopies(orig, ctx1, ctx2, m
     copies = orig(ctx1, ctx2, match=match)
     updated = {}
 
-    for k, v in pycompat.iteritems(copies):
+    for k, v in copies.items():
         updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
 
     return updated
@@ -62,7 +62,7 @@ class gitlfspointer(dict):
     def validate(self):
         """raise InvalidPointer on error. return self if there is no error"""
         requiredcount = 0
-        for k, v in pycompat.iteritems(self):
+        for k, v in self.items():
             if k in self._requiredre:
                 if not self._requiredre[k].match(v):
                     raise InvalidPointer(
@@ -24,7 +24,6 @@ from mercurial import (
     exchange,
     exthelper,
     localrepo,
-    pycompat,
     revlog,
     scmutil,
     util,
@@ -142,7 +141,7 @@ def writetostore(self, text):
 
     # translate hg filelog metadata to lfs metadata with "x-hg-" prefix
     if hgmeta is not None:
-        for k, v in pycompat.iteritems(hgmeta):
+        for k, v in hgmeta.items():
             metadata[b'x-hg-%s' % k] = v
 
     rawtext = metadata.serialize()
@@ -2024,7 +2024,7 @@ class queue(object):
                     # we can't copy a file created by the patch itself
                     if dst in copies:
                         del copies[dst]
-                for src, dsts in pycompat.iteritems(copies):
+                for src, dsts in copies.items():
                     for dst in dsts:
                         repo.dirstate.copy(src, dst)
             else:
@@ -4287,7 +4287,7 @@ def extsetup(ui):
         entry[1].extend(mqopt)
 
     def dotable(cmdtable):
-        for cmd, entry in pycompat.iteritems(cmdtable):
+        for cmd, entry in cmdtable.items():
             cmd = cmdutil.parsealiases(cmd)[0]
             func = entry[0]
             if func.norepo:
@@ -2279,7 +2279,7 @@ def phabupdate(ui, repo, *specs, **opts)
         drevmap = getdrevmap(repo, logcmdutil.revrange(repo, [revs]))
         specs = []
         unknown = []
-        for r, d in pycompat.iteritems(drevmap):
+        for r, d in drevmap.items():
             if d is None:
                 unknown.append(repo[r])
             else:
@@ -2364,7 +2364,7 @@ def phabstatusshowview(ui, repo, display
     revs = repo.revs('sort(_underway(), topo)')
     drevmap = getdrevmap(repo, revs)
     unknownrevs, drevids, revsbydrevid = [], set(), {}
-    for rev, drevid in pycompat.iteritems(drevmap):
+    for rev, drevid in drevmap.items():
         if drevid is not None:
             drevids.add(drevid)
             revsbydrevid.setdefault(drevid, set()).add(rev)
@@ -243,7 +243,7 @@ class rebaseruntime(object):
         f.write(b'%d\n' % int(self.keepbranchesf))
         f.write(b'%s\n' % (self.activebookmark or b''))
         destmap = self.destmap
-        for d, v in pycompat.iteritems(self.state):
+        for d, v in self.state.items():
             oldrev = repo[d].hex()
             if v >= 0:
                 newrev = repo[v].hex()
@@ -505,7 +505,7 @@ class rebaseruntime(object):
         # commits.
         self.storestatus(tr)
 
-        cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
+        cands = [k for k, v in self.state.items() if v == revtodo]
         p = repo.ui.makeprogress(
             _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
         )
@@ -1336,7 +1336,7 @@ def _definedestmap(ui, repo, inmemory, d
             # emulate the old behavior, showing "nothing to rebase" (a better
             # behavior may be abort with "cannot find branching point" error)
             bpbase.clear()
-        for bp, bs in pycompat.iteritems(bpbase):  # calculate roots
+        for bp, bs in bpbase.items():  # calculate roots
             roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
 
         rebaseset = repo.revs(b'%ld::', roots)
@@ -2103,7 +2103,7 @@ def clearrebased(
         fl = fm.formatlist
         fd = fm.formatdict
         changes = {}
-        for oldns, newn in pycompat.iteritems(replacements):
+        for oldns, newn in replacements.items():
             for oldn in oldns:
                 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
         nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
@@ -177,7 +177,7 @@ class releasenotessections(object):
             custom_sections = getcustomadmonitions(repo)
             if custom_sections:
                 sections.update(custom_sections)
-            self._sections = list(pycompat.iteritems(sections))
+            self._sections = list(sections.items())
         else:
             self._sections = list(DEFAULT_SECTIONS)
 
@@ -146,7 +146,7 @@ class basestore(object):
 
         filenamemap = self._resolvefilenames(existing.keys())
 
-        for filename, sha in pycompat.iteritems(filenamemap):
+        for filename, sha in filenamemap.items():
             yield (filename, existing[sha])
 
     def _resolvefilenames(self, hashes):
@@ -453,7 +453,7 @@ class mutabledatapack(basepack.mutableba
 
     def createindex(self, nodelocations, indexoffset):
         entries = sorted(
-            (n, db, o, s) for n, (db, o, s) in pycompat.iteritems(self.entries)
+            (n, db, o, s) for n, (db, o, s) in self.entries.items()
         )
 
         rawindex = b''
@@ -519,7 +519,7 @@ class mutablehistorypack(basepack.mutabl
 
         files = (
             (hashutil.sha1(filename).digest(), filename, offset, size)
-            for filename, (offset, size) in pycompat.iteritems(self.files)
+            for filename, (offset, size) in self.files.items()
         )
         files = sorted(files)
 
@@ -555,7 +555,7 @@ class mutablehistorypack(basepack.mutabl
             )
             nodeindexoffset += constants.FILENAMESIZE + len(filename)
 
-            for node, location in sorted(pycompat.iteritems(nodelocations)):
+            for node, location in sorted(nodelocations.items()):
                 nodeindexentries.append(
                     struct.pack(nodeindexformat, node, location)
                 )
@@ -15,7 +15,6 @@ from mercurial import (
     ancestor,
     error,
     mdiff,
-    pycompat,
     revlog,
 )
 from mercurial.utils import storageutil
@@ -423,7 +422,7 @@ class remotefilelog(object):
             return self.repo.nullid
 
         revmap, parentfunc = self._buildrevgraph(a, b)
-        nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
+        nodemap = {v: k for (k, v) in revmap.items()}
 
         ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
         if ancs:
@@ -438,7 +437,7 @@ class remotefilelog(object):
             return self.repo.nullid
 
         revmap, parentfunc = self._buildrevgraph(a, b)
-        nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
+        nodemap = {v: k for (k, v) in revmap.items()}
 
         ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
         return map(nodemap.__getitem__, ancs)
@@ -454,7 +453,7 @@ class remotefilelog(object):
         parentsmap = collections.defaultdict(list)
         allparents = set()
         for mapping in (amap, bmap):
-            for node, pdata in pycompat.iteritems(mapping):
+            for node, pdata in mapping.items():
                 parents = parentsmap[node]
                 p1, p2, linknode, copyfrom = pdata
                 # Don't follow renames (copyfrom).
@@ -21,7 +21,6 @@ from mercurial import (
     error,
     extensions,
     match,
-    pycompat,
     scmutil,
     store,
     streamclone,
@@ -416,7 +415,7 @@ def gcserver(ui, repo):
     cachepath = repo.vfs.join(b"remotefilelogcache")
     for head in heads:
         mf = repo[head].manifest()
-        for filename, filenode in pycompat.iteritems(mf):
+        for filename, filenode in mf.items():
             filecachepath = os.path.join(cachepath, filename, hex(filenode))
             neededfiles.add(filecachepath)
 
@@ -487,12 +487,12 @@ def keepset(repo, keyfn, lastkeepkeys=No
         if type(m) is dict:
             # m is a result of diff of two manifests and is a dictionary that
             # maps filename to ((newnode, newflag), (oldnode, oldflag)) tuple
-            for filename, diff in pycompat.iteritems(m):
+            for filename, diff in m.items():
                 if diff[0][0] is not None:
                     keepkeys.add(keyfn(filename, diff[0][0]))
         else:
             # m is a manifest object
-            for filename, filenode in pycompat.iteritems(m):
+            for filename, filenode in m.items():
                 keepkeys.add(keyfn(filename, filenode))
 
     return keepkeys
@@ -602,7 +602,7 @@ class repacker(object):
         repackprogress = ui.makeprogress(
             _(b"repacking data"), unit=self.unit, total=len(byfile)
         )
-        for filename, entries in sorted(pycompat.iteritems(byfile)):
+        for filename, entries in sorted(byfile.items()):
             repackprogress.update(count)
 
             ancestors = {}
@@ -756,7 +756,7 @@ class repacker(object):
         progress = ui.makeprogress(
             _(b"repacking history"), unit=self.unit, total=len(byfile)
         )
-        for filename, entries in sorted(pycompat.iteritems(byfile)):
+        for filename, entries in sorted(byfile.items()):
             ancestors = {}
             nodes = list(node for node in entries)
 
@@ -14,7 +14,6 @@ from mercurial import (
     error,
     localrepo,
     match,
-    pycompat,
     scmutil,
     sparse,
     util,
@@ -268,7 +267,7 @@ def wraprepo(repo):
         mfrevlog = mfl.getstorage(b'')
         if base is not None:
             mfdict = mfl[repo[base].manifestnode()].read()
-            skip = set(pycompat.iteritems(mfdict))
+            skip = set(mfdict.items())
         else:
             skip = set()
 
@@ -298,7 +297,7 @@ def wraprepo(repo):
             else:
                 mfdict = mfl[mfnode].read()
 
-            diff = pycompat.iteritems(mfdict)
+            diff = mfdict.items()
             if pats:
                 diff = (pf for pf in diff if m(pf[0]))
             if sparsematch:
@@ -102,7 +102,7 @@ def sumdicts(*dicts):
     """
     result = collections.defaultdict(lambda: 0)
     for dict in dicts:
-        for k, v in pycompat.iteritems(dict):
+        for k, v in dict.items():
             result[k] += v
     return result
 
@@ -110,7 +110,7 @@ def sumdicts(*dicts):
 def prefixkeys(dict, prefix):
     """Returns ``dict`` with ``prefix`` prepended to all its keys."""
     result = {}
-    for k, v in pycompat.iteritems(dict):
+    for k, v in dict.items():
         result[prefix + k] = v
     return result
 
@@ -208,7 +208,7 @@ def parsepackmeta(metabuf):
     integers.
     """
     metadict = _parsepackmeta(metabuf)
-    for k, v in pycompat.iteritems(metadict):
+    for k, v in metadict.items():
         if k in _metaitemtypes and int in _metaitemtypes[k]:
             metadict[k] = bin2int(v)
     return metadict
@@ -170,7 +170,7 @@ class lazyremotenamedict(mutablemapping)
         if not self.loaded:
             self._load()
 
-        for k, vtup in pycompat.iteritems(self.potentialentries):
+        for k, vtup in self.potentialentries.items():
             yield (k, [bin(vtup[0])])
 
     items = iteritems
@@ -207,7 +207,7 @@ class remotenames(object):
         if not self._nodetobmarks:
             bmarktonodes = self.bmarktonodes()
             self._nodetobmarks = {}
-            for name, node in pycompat.iteritems(bmarktonodes):
+            for name, node in bmarktonodes.items():
                 self._nodetobmarks.setdefault(node[0], []).append(name)
         return self._nodetobmarks
 
@@ -218,7 +218,7 @@ class remotenames(object):
         if not self._nodetobranch:
             branchtonodes = self.branchtonodes()
             self._nodetobranch = {}
-            for name, nodes in pycompat.iteritems(branchtonodes):
+            for name, nodes in branchtonodes.items():
                 for node in nodes:
                     self._nodetobranch.setdefault(node, []).append(name)
         return self._nodetobranch
@@ -228,7 +228,7 @@ class remotenames(object):
             marktonodes = self.bmarktonodes()
             self._hoisttonodes = {}
             hoist += b'/'
-            for name, node in pycompat.iteritems(marktonodes):
+            for name, node in marktonodes.items():
                 if name.startswith(hoist):
                     name = name[len(hoist) :]
                     self._hoisttonodes[name] = node
@@ -239,7 +239,7 @@ class remotenames(object):
             marktonodes = self.bmarktonodes()
             self._nodetohoists = {}
             hoist += b'/'
-            for name, node in pycompat.iteritems(marktonodes):
+            for name, node in marktonodes.items():
                 if name.startswith(hoist):
                     name = name[len(hoist) :]
                     self._nodetohoists.setdefault(node[0], []).append(name)
@@ -80,9 +80,7 @@ def _commitfiltered(
     files = initialfiles - exclude
     # Filter copies
     copied = copiesmod.pathcopies(base, ctx)
-    copied = {
-        dst: src for dst, src in pycompat.iteritems(copied) if dst in files
-    }
+    copied = {dst: src for dst, src in copied.items() if dst in files}
 
     def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
         if path not in contentctx:
@@ -212,7 +212,7 @@ def forbidcr(ui, repo, hooktype, node, *
 def reposetup(ui, repo):
     if not repo.local():
         return
-    for name, fn in pycompat.iteritems(_filters):
+    for name, fn in _filters.items():
         repo.adddatafilter(name, fn)
 
 
@@ -75,7 +75,7 @@ exts = {
 
 
 def guesskind(dest):
-    for kind, extensions in pycompat.iteritems(exts):
+    for kind, extensions in exts.items():
         if any(dest.endswith(ext) for ext in extensions):
             return kind
     return None
@@ -137,7 +137,7 @@ class bmstore(object):
         return iter(self._refmap)
 
     def iteritems(self):
-        return pycompat.iteritems(self._refmap)
+        return self._refmap.items()
 
     def items(self):
         return self._refmap.items()
@@ -250,7 +250,7 @@ class bmstore(object):
         self._aclean = True
 
     def _write(self, fp):
-        for name, node in sorted(pycompat.iteritems(self._refmap)):
+        for name, node in sorted(self._refmap.items()):
             fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name)))
         self._clean = True
         self._repo.invalidatevolatilesets()
@@ -418,7 +418,7 @@ def headsforactive(repo):
         )
     name = repo._activebookmark.split(b'@', 1)[0]
     heads = []
-    for mark, n in pycompat.iteritems(repo._bookmarks):
+    for mark, n in repo._bookmarks.items():
         if mark.split(b'@', 1)[0] == name:
             heads.append(n)
     return heads
@@ -476,7 +476,7 @@ def listbinbookmarks(repo):
     marks = getattr(repo, '_bookmarks', {})
 
     hasnode = repo.changelog.hasnode
-    for k, v in pycompat.iteritems(marks):
+    for k, v in marks.items():
         # don't expose local divergent bookmarks
         if hasnode(v) and not isdivergent(k):
             yield k, v
@@ -687,7 +687,7 @@ def mirroring_remote(ui, repo, remotemar
     remotemarks"""
     changed = []
     localmarks = repo._bookmarks
-    for (b, id) in pycompat.iteritems(remotemarks):
+    for (b, id) in remotemarks.items():
         if id != localmarks.get(b, None) and id in repo:
             changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
     for b in localmarks:
@@ -1074,7 +1074,7 @@ def _printbookmarks(ui, repo, fm, bmarks
     hexfn = fm.hexfunc
     if len(bmarks) == 0 and fm.isplain():
         ui.status(_(b"no bookmarks set\n"))
-    for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)):
+    for bmark, (n, prefix, label) in sorted(bmarks.items()):
         fm.startitem()
         fm.context(repo=repo)
         if not ui.quiet:
@@ -270,7 +270,7 b' class branchcache(object):' | |||||
270 | return key in self._entries |
|
270 | return key in self._entries | |
271 |
|
271 | |||
272 | def iteritems(self): |
|
272 | def iteritems(self): | |
273 |
for k, v in |
|
273 | for k, v in self._entries.items(): | |
274 | self._verifybranch(k) |
|
274 | self._verifybranch(k) | |
275 | yield k, v |
|
275 | yield k, v | |
276 |
|
276 | |||
@@ -400,7 +400,7 b' class branchcache(object):' | |||||
400 | return heads |
|
400 | return heads | |
401 |
|
401 | |||
402 | def iterbranches(self): |
|
402 | def iterbranches(self): | |
403 |
for bn, heads in |
|
403 | for bn, heads in self.items(): | |
404 | yield (bn, heads) + self._branchtip(heads) |
|
404 | yield (bn, heads) + self._branchtip(heads) | |
405 |
|
405 | |||
406 | def iterheads(self): |
|
406 | def iterheads(self): | |
@@ -434,7 +434,7 b' class branchcache(object):' | |||||
434 | cachekey.append(hex(self.filteredhash)) |
|
434 | cachekey.append(hex(self.filteredhash)) | |
435 | f.write(b" ".join(cachekey) + b'\n') |
|
435 | f.write(b" ".join(cachekey) + b'\n') | |
436 | nodecount = 0 |
|
436 | nodecount = 0 | |
437 |
for label, nodes in sorted( |
|
437 | for label, nodes in sorted(self._entries.items()): | |
438 | label = encoding.fromlocal(label) |
|
438 | label = encoding.fromlocal(label) | |
439 | for node in nodes: |
|
439 | for node in nodes: | |
440 | nodecount += 1 |
|
440 | nodecount += 1 | |
@@ -490,7 +490,7 b' class branchcache(object):' | |||||
490 | # Faster than using ctx.obsolete() |
|
490 | # Faster than using ctx.obsolete() | |
491 | obsrevs = obsolete.getrevs(repo, b'obsolete') |
|
491 | obsrevs = obsolete.getrevs(repo, b'obsolete') | |
492 |
|
492 | |||
493 |
for branch, newheadrevs in |
|
493 | for branch, newheadrevs in newbranches.items(): | |
494 | # For every branch, compute the new branchheads. |
|
494 | # For every branch, compute the new branchheads. | |
495 | # A branchhead is a revision such that no descendant is on |
|
495 | # A branchhead is a revision such that no descendant is on | |
496 | # the same branch. |
|
496 | # the same branch. |
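
The branchcache hunk keeps its lazy per-entry verification while switching to .items(). A hypothetical standalone version of that pattern (class and method names invented here, not Mercurial's API):

class LazyCache:
    def __init__(self, entries):
        self._entries = dict(entries)
        self._verified = set()

    def _verify(self, key):
        # Stand-in for branchcache._verifybranch(): the real code checks
        # the cached heads against the changelog before trusting them.
        self._verified.add(key)

    def iteritems(self):
        for k, v in self._entries.items():
            self._verify(k)  # validate lazily, entry by entry
            yield k, v

cache = LazyCache({b'default': [b'0a'], b'stable': [b'1b']})
assert dict(cache.iteritems()) == {b'default': [b'0a'], b'stable': [b'1b']}
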
@@ -2239,7 +2239,7 b' def handlecheckphases(op, inpart):' | |||||
2239 | b'remote repository changed while pushing - please try again ' |
|
2239 | b'remote repository changed while pushing - please try again ' | |
2240 | b'(%s is %s expected %s)' |
|
2240 | b'(%s is %s expected %s)' | |
2241 | ) |
|
2241 | ) | |
2242 |
for expectedphase, nodes in p |
|
2242 | for expectedphase, nodes in phasetonodes.items(): | |
2243 | for n in nodes: |
|
2243 | for n in nodes: | |
2244 | actualphase = phasecache.phase(unfi, cl.rev(n)) |
|
2244 | actualphase = phasecache.phase(unfi, cl.rev(n)) | |
2245 | if actualphase != expectedphase: |
|
2245 | if actualphase != expectedphase: |
@@ -424,7 +424,7 b' class cg1unpacker(object):' | |||||
424 | mfnode = cl.changelogrevision(cset).manifest |
|
424 | mfnode = cl.changelogrevision(cset).manifest | |
425 | mfest = ml[mfnode].readdelta() |
|
425 | mfest = ml[mfnode].readdelta() | |
426 | # store file nodes we must see |
|
426 | # store file nodes we must see | |
427 |
for f, n in |
|
427 | for f, n in mfest.items(): | |
428 | needfiles.setdefault(f, set()).add(n) |
|
428 | needfiles.setdefault(f, set()).add(n) | |
429 |
|
429 | |||
430 | on_filelog_rev = None |
|
430 | on_filelog_rev = None | |
@@ -1966,7 +1966,7 b' def _addchangegroupfiles(' | |||||
1966 | del needfiles[f] |
|
1966 | del needfiles[f] | |
1967 | progress.complete() |
|
1967 | progress.complete() | |
1968 |
|
1968 | |||
1969 |
for f, needs in |
|
1969 | for f, needs in needfiles.items(): | |
1970 | fl = repo.file(f) |
|
1970 | fl = repo.file(f) | |
1971 | for n in needs: |
|
1971 | for n in needs: | |
1972 | try: |
|
1972 | try: |
@@ -134,7 +134,7 b' def _confighash(ui):' | |||||
134 | ignored = set() |
|
134 | ignored = set() | |
135 | envitems = [ |
|
135 | envitems = [ | |
136 | (k, v) |
|
136 | (k, v) | |
137 |
for k, v in |
|
137 | for k, v in encoding.environ.items() | |
138 | if _envre.match(k) and k not in ignored |
|
138 | if _envre.match(k) and k not in ignored | |
139 | ] |
|
139 | ] | |
140 | envhash = _hashlist(sorted(envitems)) |
|
140 | envhash = _hashlist(sorted(envitems)) | |
@@ -320,7 +320,7 b' class channeledsystem(object):' | |||||
320 |
|
320 | |||
321 | def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None): |
|
321 | def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None): | |
322 | args = [type, cmd, util.abspath(cwd or b'.')] |
|
322 | args = [type, cmd, util.abspath(cwd or b'.')] | |
323 |
args.extend(b'%s=%s' % (k, v) for k, v in |
|
323 | args.extend(b'%s=%s' % (k, v) for k, v in environ.items()) | |
324 | data = b'\0'.join(args) |
|
324 | data = b'\0'.join(args) | |
325 | self.out.write(struct.pack(b'>cI', self.channel, len(data))) |
|
325 | self.out.write(struct.pack(b'>cI', self.channel, len(data))) | |
326 | self.out.write(data) |
|
326 | self.out.write(data) |
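
channeledsystem above frames its payload as NUL-joined fields behind a one-byte channel identifier and a big-endian 32-bit length (the struct.pack(b'>cI', ...) call). A small sketch of that framing with made-up field values:

import struct

def frame(channel, args):
    data = b'\0'.join(args)
    # one channel byte, then a 4-byte big-endian length, then the data
    return struct.pack(b'>cI', channel, len(data)) + data

payload = frame(b'S', [b'system', b'echo hi', b'/tmp', b'PATH=/bin'])
assert payload[:1] == b'S'
assert struct.unpack(b'>I', payload[1:5])[0] == len(payload) - 5
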
@@ -626,7 +626,7 b' def dorecord(' | |||||
626 | # 5. finally restore backed-up files |
|
626 | # 5. finally restore backed-up files | |
627 | try: |
|
627 | try: | |
628 | dirstate = repo.dirstate |
|
628 | dirstate = repo.dirstate | |
629 |
for realname, tmpname in |
|
629 | for realname, tmpname in backups.items(): | |
630 | ui.debug(b'restoring %r to %r\n' % (tmpname, realname)) |
|
630 | ui.debug(b'restoring %r to %r\n' % (tmpname, realname)) | |
631 |
|
631 | |||
632 | if dirstate.get_entry(realname).maybe_clean: |
|
632 | if dirstate.get_entry(realname).maybe_clean: |
@@ -2469,7 +2469,7 b' def copy(ui, repo, *pats, **opts):' | |||||
2469 | ) |
|
2469 | ) | |
2470 | def debugcommands(ui, cmd=b'', *args): |
|
2470 | def debugcommands(ui, cmd=b'', *args): | |
2471 | """list all available commands and options""" |
|
2471 | """list all available commands and options""" | |
2472 |
for cmd, vals in sorted( |
|
2472 | for cmd, vals in sorted(table.items()): | |
2473 | cmd = cmd.split(b'|')[0] |
|
2473 | cmd = cmd.split(b'|')[0] | |
2474 | opts = b', '.join([i[1] for i in vals[1]]) |
|
2474 | opts = b', '.join([i[1] for i in vals[1]]) | |
2475 | ui.write(b'%s: %s\n' % (cmd, opts)) |
|
2475 | ui.write(b'%s: %s\n' % (cmd, opts)) | |
@@ -7089,7 +7089,7 b' def summary(ui, repo, **opts):' | |||||
7089 |
|
7089 | |||
7090 | c = repo.dirstate.copies() |
|
7090 | c = repo.dirstate.copies() | |
7091 | copied, renamed = [], [] |
|
7091 | copied, renamed = [], [] | |
7092 |
for d, s in |
|
7092 | for d, s in c.items(): | |
7093 | if s in status.removed: |
|
7093 | if s in status.removed: | |
7094 | status.removed.remove(s) |
|
7094 | status.removed.remove(s) | |
7095 | renamed.append(d) |
|
7095 | renamed.append(d) |
@@ -123,7 +123,7 b' class basectx(object):' | |||||
123 | deleted, unknown, ignored = s.deleted, s.unknown, s.ignored |
|
123 | deleted, unknown, ignored = s.deleted, s.unknown, s.ignored | |
124 | deletedset = set(deleted) |
|
124 | deletedset = set(deleted) | |
125 | d = mf1.diff(mf2, match=match, clean=listclean) |
|
125 | d = mf1.diff(mf2, match=match, clean=listclean) | |
126 |
for fn, value in |
|
126 | for fn, value in d.items(): | |
127 | if fn in deletedset: |
|
127 | if fn in deletedset: | |
128 | continue |
|
128 | continue | |
129 | if value is None: |
|
129 | if value is None: |
@@ -17,7 +17,6 b' from . import (' | |||||
17 | match as matchmod, |
|
17 | match as matchmod, | |
18 | pathutil, |
|
18 | pathutil, | |
19 | policy, |
|
19 | policy, | |
20 | pycompat, |
|
|||
21 | util, |
|
20 | util, | |
22 | ) |
|
21 | ) | |
23 |
|
22 | |||
@@ -68,7 +67,7 b' def _filter(src, dst, t):' | |||||
68 | def _chain(prefix, suffix): |
|
67 | def _chain(prefix, suffix): | |
69 | """chain two sets of copies 'prefix' and 'suffix'""" |
|
68 | """chain two sets of copies 'prefix' and 'suffix'""" | |
70 | result = prefix.copy() |
|
69 | result = prefix.copy() | |
71 |
for key, value in |
|
70 | for key, value in suffix.items(): | |
72 | result[key] = prefix.get(value, value) |
|
71 | result[key] = prefix.get(value, value) | |
73 | return result |
|
72 | return result | |
74 |
|
73 | |||
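
_chain above composes two copy maps keyed dest -> source. A standalone copy of the same logic with a worked example: if prefix recorded b <- a and suffix records c <- b, the composition records c <- a:

def chain(prefix, suffix):
    result = prefix.copy()
    for key, value in suffix.items():
        # Follow the suffix's source back through the prefix, falling
        # back to the value itself when the prefix never renamed it.
        result[key] = prefix.get(value, value)
    return result

assert chain({b'b': b'a'}, {b'c': b'b'}) == {b'b': b'a', b'c': b'a'}
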
@@ -408,7 +407,7 b' def _combine_changeset_copies(' | |||||
408 |
|
407 | |||
409 | if childcopies: |
|
408 | if childcopies: | |
410 | newcopies = copies.copy() |
|
409 | newcopies = copies.copy() | |
411 |
for dest, source in |
|
410 | for dest, source in childcopies.items(): | |
412 | prev = copies.get(source) |
|
411 | prev = copies.get(source) | |
413 | if prev is not None and prev[1] is not None: |
|
412 | if prev is not None and prev[1] is not None: | |
414 | source = prev[1] |
|
413 | source = prev[1] | |
@@ -623,7 +622,7 b' def _combine_changeset_copies_extra(' | |||||
623 | newcopies = copies |
|
622 | newcopies = copies | |
624 | if childcopies: |
|
623 | if childcopies: | |
625 | newcopies = copies.copy() |
|
624 | newcopies = copies.copy() | |
626 |
for dest, source in |
|
625 | for dest, source in childcopies.items(): | |
627 | prev = copies.get(source) |
|
626 | prev = copies.get(source) | |
628 | if prev is not None and prev[1] is not None: |
|
627 | if prev is not None and prev[1] is not None: | |
629 | source = prev[1] |
|
628 | source = prev[1] | |
@@ -721,7 +720,7 b' def _reverse_renames(copies, dst, match)' | |||||
721 | # can still exist (e.g. hg cp a b; hg mv a c). In those cases we |
|
720 | # can still exist (e.g. hg cp a b; hg mv a c). In those cases we | |
722 | # arbitrarily pick one of the renames. |
|
721 | # arbitrarily pick one of the renames. | |
723 | r = {} |
|
722 | r = {} | |
724 |
for k, v in sorted( |
|
723 | for k, v in sorted(copies.items()): | |
725 | if match and not match(v): |
|
724 | if match and not match(v): | |
726 | continue |
|
725 | continue | |
727 | # remove copies |
|
726 | # remove copies | |
@@ -1080,7 +1079,7 b' def _dir_renames(repo, ctx, copy, fullco' | |||||
1080 |
|
1079 | |||
1081 | # examine each file copy for a potential directory move, which is |
|
1080 | # examine each file copy for a potential directory move, which is | |
1082 | # when all the files in a directory are moved to a new directory |
|
1081 | # when all the files in a directory are moved to a new directory | |
1083 |
for dst, src in |
|
1082 | for dst, src in fullcopy.items(): | |
1084 | dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst) |
|
1083 | dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst) | |
1085 | if dsrc in invalid: |
|
1084 | if dsrc in invalid: | |
1086 | # already seen to be uninteresting |
|
1085 | # already seen to be uninteresting | |
@@ -1103,7 +1102,7 b' def _dir_renames(repo, ctx, copy, fullco' | |||||
1103 | if not dirmove: |
|
1102 | if not dirmove: | |
1104 | return {}, {} |
|
1103 | return {}, {} | |
1105 |
|
1104 | |||
1106 |
dirmove = {k + b"/": v + b"/" for k, v in |
|
1105 | dirmove = {k + b"/": v + b"/" for k, v in dirmove.items()} | |
1107 |
|
1106 | |||
1108 | for d in dirmove: |
|
1107 | for d in dirmove: | |
1109 | repo.ui.debug( |
|
1108 | repo.ui.debug( | |
@@ -1186,7 +1185,7 b' def _heuristicscopytracing(repo, c1, c2,' | |||||
1186 |
|
1185 | |||
1187 | copies2 = {} |
|
1186 | copies2 = {} | |
1188 | cp = _forwardcopies(base, c2) |
|
1187 | cp = _forwardcopies(base, c2) | |
1189 |
for dst, src in |
|
1188 | for dst, src in cp.items(): | |
1190 | if src in m1: |
|
1189 | if src in m1: | |
1191 | copies2[dst] = src |
|
1190 | copies2[dst] = src | |
1192 |
|
1191 | |||
@@ -1304,5 +1303,5 b' def graftcopies(wctx, ctx, base):' | |||||
1304 | for dest, __ in list(new_copies.items()): |
|
1303 | for dest, __ in list(new_copies.items()): | |
1305 | if dest in parent: |
|
1304 | if dest in parent: | |
1306 | del new_copies[dest] |
|
1305 | del new_copies[dest] | |
1307 |
for dst, src in |
|
1306 | for dst, src in new_copies.items(): | |
1308 | wctx[dst].markcopied(src) |
|
1307 | wctx[dst].markcopied(src) |
@@ -492,7 +492,7 b' def debugcapabilities(ui, path, **opts):' | |||||
492 | b2caps = bundle2.bundle2caps(peer) |
|
492 | b2caps = bundle2.bundle2caps(peer) | |
493 | if b2caps: |
|
493 | if b2caps: | |
494 | ui.writenoi18n(b'Bundle2 capabilities:\n') |
|
494 | ui.writenoi18n(b'Bundle2 capabilities:\n') | |
495 |
for key, values in sorted( |
|
495 | for key, values in sorted(b2caps.items()): | |
496 | ui.write(b' %s\n' % key) |
|
496 | ui.write(b' %s\n' % key) | |
497 | for v in values: |
|
497 | for v in values: | |
498 | ui.write(b' %s\n' % v) |
|
498 | ui.write(b' %s\n' % v) | |
@@ -2388,7 +2388,7 b' def debugmergestate(ui, repo, *args, **o' | |||||
2388 | if f in ms: |
|
2388 | if f in ms: | |
2389 | # If file is in mergestate, we have already processed its extras |
|
2389 | # If file is in mergestate, we have already processed its extras | |
2390 | continue |
|
2390 | continue | |
2391 |
for k, v in |
|
2391 | for k, v in d.items(): | |
2392 | fm_extras.startitem() |
|
2392 | fm_extras.startitem() | |
2393 | fm_extras.data(file=f) |
|
2393 | fm_extras.data(file=f) | |
2394 | fm_extras.data(key=k) |
|
2394 | fm_extras.data(key=k) | |
@@ -2405,7 +2405,7 b' def debugnamecomplete(ui, repo, *args):' | |||||
2405 | names = set() |
|
2405 | names = set() | |
2406 | # since we previously only listed open branches, we will handle that |
|
2406 | # since we previously only listed open branches, we will handle that | |
2407 | # specially (after this for loop) |
|
2407 | # specially (after this for loop) | |
2408 |
for name, ns in |
|
2408 | for name, ns in repo.names.items(): | |
2409 | if name != b'branches': |
|
2409 | if name != b'branches': | |
2410 | names.update(ns.listnames(repo)) |
|
2410 | names.update(ns.listnames(repo)) | |
2411 | names.update( |
|
2411 | names.update( | |
@@ -2699,7 +2699,7 b' def debugpathcomplete(ui, repo, *specs, ' | |||||
2699 | fullpaths = opts['full'] |
|
2699 | fullpaths = opts['full'] | |
2700 | files, dirs = set(), set() |
|
2700 | files, dirs = set(), set() | |
2701 | adddir, addfile = dirs.add, files.add |
|
2701 | adddir, addfile = dirs.add, files.add | |
2702 |
for f, st in |
|
2702 | for f, st in dirstate.items(): | |
2703 | if f.startswith(spec) and st.state in acceptable: |
|
2703 | if f.startswith(spec) and st.state in acceptable: | |
2704 | if fixpaths: |
|
2704 | if fixpaths: | |
2705 | f = f.replace(b'/', pycompat.ossep) |
|
2705 | f = f.replace(b'/', pycompat.ossep) | |
@@ -4270,7 +4270,7 b' def debugwireargs(ui, repopath, *vals, *' | |||||
4270 | for opt in cmdutil.remoteopts: |
|
4270 | for opt in cmdutil.remoteopts: | |
4271 | del opts[opt[1]] |
|
4271 | del opts[opt[1]] | |
4272 | args = {} |
|
4272 | args = {} | |
4273 |
for k, v in |
|
4273 | for k, v in opts.items(): | |
4274 | if v: |
|
4274 | if v: | |
4275 | args[k] = v |
|
4275 | args[k] = v | |
4276 | args = pycompat.strkwargs(args) |
|
4276 | args = pycompat.strkwargs(args) |
@@ -342,7 +342,7 b' class dirstate(object):' | |||||
342 | return iter(sorted(self._map)) |
|
342 | return iter(sorted(self._map)) | |
343 |
|
343 | |||
344 | def items(self): |
|
344 | def items(self): | |
345 |
return |
|
345 | return self._map.items() | |
346 |
|
346 | |||
347 | iteritems = items |
|
347 | iteritems = items | |
348 |
|
348 | |||
@@ -770,9 +770,7 b' class dirstate(object):' | |||||
770 | def _writedirstate(self, tr, st): |
|
770 | def _writedirstate(self, tr, st): | |
771 | # notify callbacks about parents change |
|
771 | # notify callbacks about parents change | |
772 | if self._origpl is not None and self._origpl != self._pl: |
|
772 | if self._origpl is not None and self._origpl != self._pl: | |
773 | for c, callback in sorted( |
|
773 | for c, callback in sorted(self._plchangecallbacks.items()): | |
774 | pycompat.iteritems(self._plchangecallbacks) |
|
|||
775 | ): |
|
|||
776 | callback(self, self._origpl, self._pl) |
|
774 | callback(self, self._origpl, self._pl) | |
777 | self._origpl = None |
|
775 | self._origpl = None | |
778 | self._map.write(tr, st) |
|
776 | self._map.write(tr, st) | |
@@ -935,7 +933,7 b' class dirstate(object):' | |||||
935 | if match.isexact() and self._checkcase: |
|
933 | if match.isexact() and self._checkcase: | |
936 | normed = {} |
|
934 | normed = {} | |
937 |
|
935 | |||
938 |
for f, st in |
|
936 | for f, st in results.items(): | |
939 | if st is None: |
|
937 | if st is None: | |
940 | continue |
|
938 | continue | |
941 |
|
939 | |||
@@ -948,7 +946,7 b' class dirstate(object):' | |||||
948 |
|
946 | |||
949 | paths.add(f) |
|
947 | paths.add(f) | |
950 |
|
948 | |||
951 |
for norm, paths in |
|
949 | for norm, paths in normed.items(): | |
952 | if len(paths) > 1: |
|
950 | if len(paths) > 1: | |
953 | for path in paths: |
|
951 | for path in paths: | |
954 | folded = self._discoverpath( |
|
952 | folded = self._discoverpath( |
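
Note that the dirstate hunks deliberately keep "iteritems = items" as a class-level alias, so callers using either spelling keep working while the migration lands. The pattern in miniature (MapProxy is an invented name):

class MapProxy:
    def __init__(self, backing):
        self._map = backing

    def items(self):
        return self._map.items()

    # Transitional py2-era spelling; dropped once all callers say items().
    iteritems = items

p = MapProxy({b'f': 1})
assert list(p.iteritems()) == list(p.items()) == [(b'f', 1)]
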
@@ -12,7 +12,6 b' from . import (' | |||||
12 | error, |
|
12 | error, | |
13 | pathutil, |
|
13 | pathutil, | |
14 | policy, |
|
14 | policy, | |
15 | pycompat, |
|
|||
16 | txnutil, |
|
15 | txnutil, | |
17 | util, |
|
16 | util, | |
18 | ) |
|
17 | ) | |
@@ -354,7 +353,7 b' class dirstatemap(_dirstatemapcommon):' | |||||
354 | util.clearcachedproperty(self, b"dirfoldmap") |
|
353 | util.clearcachedproperty(self, b"dirfoldmap") | |
355 |
|
354 | |||
356 | def items(self): |
|
355 | def items(self): | |
357 |
return |
|
356 | return self._map.items() | |
358 |
|
357 | |||
359 | # forward for python2,3 compat |
|
358 | # forward for python2,3 compat | |
360 | iteritems = items |
|
359 | iteritems = items | |
@@ -378,7 +377,7 b' class dirstatemap(_dirstatemapcommon):' | |||||
378 | self._dirtyparents = True |
|
377 | self._dirtyparents = True | |
379 | copies = {} |
|
378 | copies = {} | |
380 | if fold_p2: |
|
379 | if fold_p2: | |
381 |
for f, s in |
|
380 | for f, s in self._map.items(): | |
382 | # Discard "merged" markers when moving away from a merge state |
|
381 | # Discard "merged" markers when moving away from a merge state | |
383 | if s.p2_info: |
|
382 | if s.p2_info: | |
384 | source = self.copymap.pop(f, None) |
|
383 | source = self.copymap.pop(f, None) | |
@@ -501,7 +500,7 b' class dirstatemap(_dirstatemapcommon):' | |||||
501 |
|
500 | |||
502 | f = {} |
|
501 | f = {} | |
503 | normcase = util.normcase |
|
502 | normcase = util.normcase | |
504 |
for name, s in |
|
503 | for name, s in self._map.items(): | |
505 | if not s.removed: |
|
504 | if not s.removed: | |
506 | f[normcase(name)] = name |
|
505 | f[normcase(name)] = name | |
507 | f[b'.'] = b'.' # prevents useless util.fspath() invocation |
|
506 | f[b'.'] = b'.' # prevents useless util.fspath() invocation |
@@ -237,7 +237,7 b' def _headssummary(pushop):' | |||||
237 |
|
237 | |||
238 | knownnode = cl.hasnode # do not use nodemap until it is filtered |
|
238 | knownnode = cl.hasnode # do not use nodemap until it is filtered | |
239 | # A. register remote heads of branches which are in outgoing set |
|
239 | # A. register remote heads of branches which are in outgoing set | |
240 |
for branch, heads in |
|
240 | for branch, heads in remotemap.items(): | |
241 | # don't add head info about branches which we don't have locally |
|
241 | # don't add head info about branches which we don't have locally | |
242 | if branch not in branches: |
|
242 | if branch not in branches: | |
243 | continue |
|
243 | continue | |
@@ -261,14 +261,14 b' def _headssummary(pushop):' | |||||
261 | repo, |
|
261 | repo, | |
262 | ( |
|
262 | ( | |
263 | (branch, heads[1]) |
|
263 | (branch, heads[1]) | |
264 |
for branch, heads in |
|
264 | for branch, heads in headssum.items() | |
265 | if heads[0] is not None |
|
265 | if heads[0] is not None | |
266 | ), |
|
266 | ), | |
267 | ) |
|
267 | ) | |
268 | newmap.update(repo, (ctx.rev() for ctx in missingctx)) |
|
268 | newmap.update(repo, (ctx.rev() for ctx in missingctx)) | |
269 |
for branch, newheads in |
|
269 | for branch, newheads in newmap.items(): | |
270 | headssum[branch][1][:] = newheads |
|
270 | headssum[branch][1][:] = newheads | |
271 |
for branch, items in |
|
271 | for branch, items in headssum.items(): | |
272 | for l in items: |
|
272 | for l in items: | |
273 | if l is not None: |
|
273 | if l is not None: | |
274 | l.sort() |
|
274 | l.sort() | |
@@ -379,9 +379,7 b' def checkheads(pushop):' | |||||
379 | headssum = _oldheadssummary(repo, remoteheads, outgoing, inc) |
|
379 | headssum = _oldheadssummary(repo, remoteheads, outgoing, inc) | |
380 | pushop.pushbranchmap = headssum |
|
380 | pushop.pushbranchmap = headssum | |
381 | newbranches = [ |
|
381 | newbranches = [ | |
382 | branch |
|
382 | branch for branch, heads in headssum.items() if heads[0] is None | |
383 | for branch, heads in pycompat.iteritems(headssum) |
|
|||
384 | if heads[0] is None |
|
|||
385 | ] |
|
383 | ] | |
386 | # 1. Check for new branches on the remote. |
|
384 | # 1. Check for new branches on the remote. | |
387 | if newbranches and not newbranch: # new branch requires --new-branch |
|
385 | if newbranches and not newbranch: # new branch requires --new-branch |
@@ -572,7 +572,7 b' class cmdalias(object):' | |||||
572 |
|
572 | |||
573 | try: |
|
573 | try: | |
574 | aliases, entry = cmdutil.findcmd(self.name, cmdtable) |
|
574 | aliases, entry = cmdutil.findcmd(self.name, cmdtable) | |
575 |
for alias, e in |
|
575 | for alias, e in cmdtable.items(): | |
576 | if e is entry: |
|
576 | if e is entry: | |
577 | self.cmd = alias |
|
577 | self.cmd = alias | |
578 | break |
|
578 | break |
@@ -805,7 +805,7 b' def _pushb2ctxcheckheads(pushop, bundler' | |||||
805 | bundler.newpart(b'check:heads', data=iter(pushop.remoteheads)) |
|
805 | bundler.newpart(b'check:heads', data=iter(pushop.remoteheads)) | |
806 | else: |
|
806 | else: | |
807 | affected = set() |
|
807 | affected = set() | |
808 |
for branch, heads in |
|
808 | for branch, heads in pushop.pushbranchmap.items(): | |
809 | remoteheads, newheads, unsyncedheads, discardedheads = heads |
|
809 | remoteheads, newheads, unsyncedheads, discardedheads = heads | |
810 | if remoteheads is not None: |
|
810 | if remoteheads is not None: | |
811 | remote = set(remoteheads) |
|
811 | remote = set(remoteheads) | |
@@ -1116,7 +1116,7 b' def _getbundlesendvars(pushop, bundler):' | |||||
1116 |
|
1116 | |||
1117 | part = bundler.newpart(b'pushvars') |
|
1117 | part = bundler.newpart(b'pushvars') | |
1118 |
|
1118 | |||
1119 |
for key, value in |
|
1119 | for key, value in shellvars.items(): | |
1120 | part.addparam(key, value, mandatory=False) |
|
1120 | part.addparam(key, value, mandatory=False) | |
1121 |
|
1121 | |||
1122 |
|
1122 |
@@ -73,7 +73,7 b' def find(name):' | |||||
73 | try: |
|
73 | try: | |
74 | mod = _extensions[name] |
|
74 | mod = _extensions[name] | |
75 | except KeyError: |
|
75 | except KeyError: | |
76 |
for k, v in |
|
76 | for k, v in _extensions.items(): | |
77 | if k.endswith(b'.' + name) or k.endswith(b'/' + name): |
|
77 | if k.endswith(b'.' + name) or k.endswith(b'/' + name): | |
78 | mod = v |
|
78 | mod = v | |
79 | break |
|
79 | break | |
@@ -170,7 +170,7 b' def _rejectunicode(name, xs):' | |||||
170 |
|
170 | |||
171 | def _validatecmdtable(ui, cmdtable): |
|
171 | def _validatecmdtable(ui, cmdtable): | |
172 | """Check if extension commands have required attributes""" |
|
172 | """Check if extension commands have required attributes""" | |
173 |
for c, e in |
|
173 | for c, e in cmdtable.items(): | |
174 | f = e[0] |
|
174 | f = e[0] | |
175 | missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)] |
|
175 | missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)] | |
176 | if not missing: |
|
176 | if not missing: | |
@@ -578,7 +578,7 b' def wrapcommand(table, command, wrapper,' | |||||
578 | ''' |
|
578 | ''' | |
579 | assert callable(wrapper) |
|
579 | assert callable(wrapper) | |
580 | aliases, entry = cmdutil.findcmd(command, table) |
|
580 | aliases, entry = cmdutil.findcmd(command, table) | |
581 |
for alias, e in |
|
581 | for alias, e in table.items(): | |
582 | if e is entry: |
|
582 | if e is entry: | |
583 | key = alias |
|
583 | key = alias | |
584 | break |
|
584 | break | |
@@ -755,7 +755,7 b' def _disabledpaths():' | |||||
755 | if name in exts or name in _order or name == b'__init__': |
|
755 | if name in exts or name in _order or name == b'__init__': | |
756 | continue |
|
756 | continue | |
757 | exts[name] = path |
|
757 | exts[name] = path | |
758 |
for name, path in |
|
758 | for name, path in _disabledextensions.items(): | |
759 | # If no path was provided for a disabled extension (e.g. "color=!"), |
|
759 | # If no path was provided for a disabled extension (e.g. "color=!"), | |
760 | # don't replace the path we already found by the scan above. |
|
760 | # don't replace the path we already found by the scan above. | |
761 | if path: |
|
761 | if path: | |
@@ -817,7 +817,7 b' def disabled():' | |||||
817 |
|
817 | |||
818 | return { |
|
818 | return { | |
819 | name: gettext(desc) |
|
819 | name: gettext(desc) | |
820 |
for name, desc in |
|
820 | for name, desc in __index__.docs.items() | |
821 | if name not in _order |
|
821 | if name not in _order | |
822 | } |
|
822 | } | |
823 | except (ImportError, AttributeError): |
|
823 | except (ImportError, AttributeError): | |
@@ -828,7 +828,7 b' def disabled():' | |||||
828 | return {} |
|
828 | return {} | |
829 |
|
829 | |||
830 | exts = {} |
|
830 | exts = {} | |
831 |
for name, path in |
|
831 | for name, path in paths.items(): | |
832 | doc = _disabledhelp(path) |
|
832 | doc = _disabledhelp(path) | |
833 | if doc and name != b'__index__': |
|
833 | if doc and name != b'__index__': | |
834 | exts[name] = doc.splitlines()[0] |
|
834 | exts[name] = doc.splitlines()[0] | |
@@ -917,7 +917,7 b' def disabledcmd(ui, cmd, strict=False):' | |||||
917 | ext = _finddisabledcmd(ui, cmd, cmd, path, strict=strict) |
|
917 | ext = _finddisabledcmd(ui, cmd, cmd, path, strict=strict) | |
918 | if not ext: |
|
918 | if not ext: | |
919 | # otherwise, interrogate each extension until there's a match |
|
919 | # otherwise, interrogate each extension until there's a match | |
920 |
for name, path in |
|
920 | for name, path in paths.items(): | |
921 | ext = _finddisabledcmd(ui, cmd, name, path, strict=strict) |
|
921 | ext = _finddisabledcmd(ui, cmd, name, path, strict=strict) | |
922 | if ext: |
|
922 | if ext: | |
923 | break |
|
923 | break | |
@@ -942,9 +942,7 b' def enabled(shortname=True):' | |||||
942 |
|
942 | |||
943 | def notloaded(): |
|
943 | def notloaded(): | |
944 | '''return short names of extensions that failed to load''' |
|
944 | '''return short names of extensions that failed to load''' | |
945 | return [ |
|
945 | return [name for name, mod in _extensions.items() if mod is None] | |
946 | name for name, mod in pycompat.iteritems(_extensions) if mod is None |
|
|||
947 | ] |
|
|||
948 |
|
946 | |||
949 |
|
947 | |||
950 | def moduleversion(module): |
|
948 | def moduleversion(module): |
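
Several extensions hunks recover a command's canonical table key by scanning for the entry object itself ("if e is entry"), because cmdtable keys are b"name|alias" strings. A sketch of that lookup over an invented table:

def findkey(table, entry):
    for alias, e in table.items():
        if e is entry:  # identity, not equality: the very same entry tuple
            return alias
    raise KeyError(entry)

entry = (lambda ui: None, [])
table = {b'commit|ci': entry}
assert findkey(table, entry) == b'commit|ci'
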
@@ -14,7 +14,6 b' from . import (' | |||||
14 | commands, |
|
14 | commands, | |
15 | error, |
|
15 | error, | |
16 | extensions, |
|
16 | extensions, | |
17 | pycompat, |
|
|||
18 | registrar, |
|
17 | registrar, | |
19 | ) |
|
18 | ) | |
20 |
|
19 | |||
@@ -114,7 +113,7 b' class exthelper(object):' | |||||
114 | self._extcommandwrappers.extend(other._extcommandwrappers) |
|
113 | self._extcommandwrappers.extend(other._extcommandwrappers) | |
115 | self._functionwrappers.extend(other._functionwrappers) |
|
114 | self._functionwrappers.extend(other._functionwrappers) | |
116 | self.cmdtable.update(other.cmdtable) |
|
115 | self.cmdtable.update(other.cmdtable) | |
117 |
for section, items in |
|
116 | for section, items in other.configtable.items(): | |
118 | if section in self.configtable: |
|
117 | if section in self.configtable: | |
119 | self.configtable[section].update(items) |
|
118 | self.configtable[section].update(items) | |
120 | else: |
|
119 | else: |
@@ -1199,7 +1199,7 b' def _workingpath(repo, ctx):' | |||||
1199 |
|
1199 | |||
1200 | def loadinternalmerge(ui, extname, registrarobj): |
|
1200 | def loadinternalmerge(ui, extname, registrarobj): | |
1201 | """Load internal merge tool from specified registrarobj""" |
|
1201 | """Load internal merge tool from specified registrarobj""" | |
1202 |
for name, func in |
|
1202 | for name, func in registrarobj._table.items(): | |
1203 | fullname = b':' + name |
|
1203 | fullname = b':' + name | |
1204 | internals[fullname] = func |
|
1204 | internals[fullname] = func | |
1205 | internals[b'internal:' + name] = func |
|
1205 | internals[b'internal:' + name] = func |
@@ -613,7 +613,7 b' def match(ctx, cwd, expr, badfn=None):' | |||||
613 |
|
613 | |||
614 | def loadpredicate(ui, extname, registrarobj): |
|
614 | def loadpredicate(ui, extname, registrarobj): | |
615 | """Load fileset predicates from specified registrarobj""" |
|
615 | """Load fileset predicates from specified registrarobj""" | |
616 |
for name, func in |
|
616 | for name, func in registrarobj._table.items(): | |
617 | symbols[name] = func |
|
617 | symbols[name] = func | |
618 |
|
618 | |||
619 |
|
619 |
@@ -293,7 +293,7 b' class _nestedformatter(baseformatter):' | |||||
293 | def _iteritems(data): |
|
293 | def _iteritems(data): | |
294 | '''iterate key-value pairs in stable order''' |
|
294 | '''iterate key-value pairs in stable order''' | |
295 | if isinstance(data, dict): |
|
295 | if isinstance(data, dict): | |
296 |
return sorted( |
|
296 | return sorted(data.items()) | |
297 | return data |
|
297 | return data | |
298 |
|
298 | |||
299 |
|
299 |
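
_iteritems above sorts dict input so formatter output is deterministic, while sequences that already carry an order pass through untouched. The helper in isolation:

def iteritems_stable(data):
    '''iterate key-value pairs in stable order (sketch of the helper above)'''
    if isinstance(data, dict):
        return sorted(data.items())
    return data

assert iteritems_stable({b'b': 2, b'a': 1}) == [(b'a', 1), (b'b', 2)]
assert iteritems_stable([(b'z', 0)]) == [(b'z', 0)]
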
@@ -125,7 +125,7 b' def listexts(header, exts, indent=1, sho' | |||||
125 | '''return a text listing of the given extensions''' |
|
125 | '''return a text listing of the given extensions''' | |
126 | rst = [] |
|
126 | rst = [] | |
127 | if exts: |
|
127 | if exts: | |
128 |
for name, desc in sorted( |
|
128 | for name, desc in sorted(exts.items()): | |
129 | if not showdeprecated and any(w in desc for w in _exclkeywords): |
|
129 | if not showdeprecated and any(w in desc for w in _exclkeywords): | |
130 | continue |
|
130 | continue | |
131 | rst.append(b'%s:%s: %s\n' % (b' ' * indent, name, desc)) |
|
131 | rst.append(b'%s:%s: %s\n' % (b' ' * indent, name, desc)) | |
@@ -280,7 +280,7 b' def topicmatch(ui, commands, kw):' | |||||
280 | name = names[0] |
|
280 | name = names[0] | |
281 | if not filtertopic(ui, name): |
|
281 | if not filtertopic(ui, name): | |
282 | results[b'topics'].append((names[0], header)) |
|
282 | results[b'topics'].append((names[0], header)) | |
283 |
for cmd, entry in |
|
283 | for cmd, entry in commands.table.items(): | |
284 | if len(entry) == 3: |
|
284 | if len(entry) == 3: | |
285 | summary = entry[2] |
|
285 | summary = entry[2] | |
286 | else: |
|
286 | else: | |
@@ -664,7 +664,7 b' def _getcategorizedhelpcmds(ui, cmdtable' | |||||
664 | h = {} |
|
664 | h = {} | |
665 | # Command -> string showing synonyms |
|
665 | # Command -> string showing synonyms | |
666 | syns = {} |
|
666 | syns = {} | |
667 |
for c, e in |
|
667 | for c, e in cmdtable.items(): | |
668 | fs = cmdutil.parsealiases(c) |
|
668 | fs = cmdutil.parsealiases(c) | |
669 | f = fs[0] |
|
669 | f = fs[0] | |
670 | syns[f] = fs |
|
670 | syns[f] = fs |
@@ -412,7 +412,7 b' class hgweb(object):' | |||||
412 |
|
412 | |||
413 | if cmd == b'archive': |
|
413 | if cmd == b'archive': | |
414 | fn = req.qsparams[b'node'] |
|
414 | fn = req.qsparams[b'node'] | |
415 |
for type_, spec in |
|
415 | for type_, spec in webutil.archivespecs.items(): | |
416 | ext = spec[2] |
|
416 | ext = spec[2] | |
417 | if fn.endswith(ext): |
|
417 | if fn.endswith(ext): | |
418 | req.qsparams[b'node'] = fn[: -len(ext)] |
|
418 | req.qsparams[b'node'] = fn[: -len(ext)] |
@@ -77,7 +77,7 b' class multidict(object):' | |||||
77 | return vals[0] |
|
77 | return vals[0] | |
78 |
|
78 | |||
79 | def asdictoflists(self): |
|
79 | def asdictoflists(self): | |
80 |
return {k: list(v) for k, v in |
|
80 | return {k: list(v) for k, v in self._items.items()} | |
81 |
|
81 | |||
82 |
|
82 | |||
83 | @attr.s(frozen=True) |
|
83 | @attr.s(frozen=True) | |
@@ -175,7 +175,7 b' def parserequestfromenv(env, reponame=No' | |||||
175 | # This is what is documented to be used for os.environ on Unix. |
|
175 | # This is what is documented to be used for os.environ on Unix. | |
176 | return pycompat.fsencode(s) |
|
176 | return pycompat.fsencode(s) | |
177 |
|
177 | |||
178 |
env = {tobytes(k): tobytes(v) for k, v in |
|
178 | env = {tobytes(k): tobytes(v) for k, v in env.items()} | |
179 |
|
179 | |||
180 | # Some hosting solutions are emulating hgwebdir, and dispatching directly |
|
180 | # Some hosting solutions are emulating hgwebdir, and dispatching directly | |
181 | # to an hgweb instance using this environment variable. This was always |
|
181 | # to an hgweb instance using this environment variable. This was always | |
@@ -309,7 +309,7 b' def parserequestfromenv(env, reponame=No' | |||||
309 | # perform case normalization for us. We just rewrite underscore to dash |
|
309 | # perform case normalization for us. We just rewrite underscore to dash | |
310 | # so keys match what likely went over the wire. |
|
310 | # so keys match what likely went over the wire. | |
311 | headers = [] |
|
311 | headers = [] | |
312 |
for k, v in |
|
312 | for k, v in env.items(): | |
313 | if k.startswith(b'HTTP_'): |
|
313 | if k.startswith(b'HTTP_'): | |
314 | headers.append((k[len(b'HTTP_') :].replace(b'_', b'-'), v)) |
|
314 | headers.append((k[len(b'HTTP_') :].replace(b'_', b'-'), v)) | |
315 |
|
315 |
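
parserequestfromenv above normalizes the WSGI environ to bytes before extracting HTTP_* headers. A rough equivalent, assuming UTF-8 where the real code uses pycompat.fsencode (the filesystem encoding):

import os

def tobytes(s):
    return s.encode('utf-8', 'replace') if isinstance(s, str) else s

env = {tobytes(k): tobytes(v) for k, v in os.environ.items()}
headers = [
    # HTTP_ACCEPT_ENCODING becomes ACCEPT-ENCODING, matching the wire form
    (k[len(b'HTTP_'):].replace(b'_', b'-'), v)
    for k, v in env.items()
    if k.startswith(b'HTTP_')
]
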
@@ -563,7 +563,7 b' def manifest(web):' | |||||
563 | l = len(path) |
|
563 | l = len(path) | |
564 | abspath = b"/" + path |
|
564 | abspath = b"/" + path | |
565 |
|
565 | |||
566 |
for full, n in |
|
566 | for full, n in mf.items(): | |
567 | # the virtual path (working copy path) used for the full |
|
567 | # the virtual path (working copy path) used for the full | |
568 | # (repository) path |
|
568 | # (repository) path | |
569 | f = decodepath(full) |
|
569 | f = decodepath(full) | |
@@ -1520,7 +1520,7 b' def help(web):' | |||||
1520 |
|
1520 | |||
1521 | early, other = [], [] |
|
1521 | early, other = [], [] | |
1522 | primary = lambda s: s.partition(b'|')[0] |
|
1522 | primary = lambda s: s.partition(b'|')[0] | |
1523 |
for c, e in |
|
1523 | for c, e in commands.table.items(): | |
1524 | doc = _getdoc(e) |
|
1524 | doc = _getdoc(e) | |
1525 | if b'DEPRECATED' in doc or c.startswith(b'debug'): |
|
1525 | if b'DEPRECATED' in doc or c.startswith(b'debug'): | |
1526 | continue |
|
1526 | continue |
@@ -56,7 +56,7 b' def archivelist(ui, nodeid, url=None):' | |||||
56 | allowed = ui.configlist(b'web', b'allow-archive', untrusted=True) |
|
56 | allowed = ui.configlist(b'web', b'allow-archive', untrusted=True) | |
57 | archives = [] |
|
57 | archives = [] | |
58 |
|
58 | |||
59 |
for typ, spec in |
|
59 | for typ, spec in archivespecs.items(): | |
60 | if typ in allowed or ui.configbool( |
|
60 | if typ in allowed or ui.configbool( | |
61 | b'web', b'allow' + typ, untrusted=True |
|
61 | b'web', b'allow' + typ, untrusted=True | |
62 | ): |
|
62 | ): | |
@@ -863,7 +863,7 b' class sessionvars(templateutil.wrapped):' | |||||
863 |
|
863 | |||
864 | def itermaps(self, context): |
|
864 | def itermaps(self, context): | |
865 | separator = self._start |
|
865 | separator = self._start | |
866 |
for key, value in sorted( |
|
866 | for key, value in sorted(self._vars.items()): | |
867 | yield { |
|
867 | yield { | |
868 | b'name': key, |
|
868 | b'name': key, | |
869 | b'value': pycompat.bytestr(value), |
|
869 | b'value': pycompat.bytestr(value), |
@@ -23,7 +23,7 b' def launch(application):' | |||||
23 | procutil.setbinary(procutil.stdin) |
|
23 | procutil.setbinary(procutil.stdin) | |
24 | procutil.setbinary(procutil.stdout) |
|
24 | procutil.setbinary(procutil.stdout) | |
25 |
|
25 | |||
26 |
environ = dict( |
|
26 | environ = dict(os.environ.items()) # re-exports | |
27 | environ.setdefault('PATH_INFO', '') |
|
27 | environ.setdefault('PATH_INFO', '') | |
28 | if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): |
|
28 | if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): | |
29 | # IIS includes script_name in PATH_INFO |
|
29 | # IIS includes script_name in PATH_INFO |
@@ -166,7 +166,7 b' def _exthook(ui, repo, htype, name, cmd,' | |||||
166 | else: |
|
166 | else: | |
167 | env[b'HGPLAIN'] = b'' |
|
167 | env[b'HGPLAIN'] = b'' | |
168 |
|
168 | |||
169 |
for k, v in |
|
169 | for k, v in args.items(): | |
170 | # transaction changes can accumulate MBs of data, so skip it |
|
170 | # transaction changes can accumulate MBs of data, so skip it | |
171 | # for external hooks |
|
171 | # for external hooks | |
172 | if k == b'changes': |
|
172 | if k == b'changes': |
@@ -93,7 +93,7 b' def readauthforuri(ui, uri, user):' | |||||
93 | bestuser = None |
|
93 | bestuser = None | |
94 | bestlen = 0 |
|
94 | bestlen = 0 | |
95 | bestauth = None |
|
95 | bestauth = None | |
96 |
for group, auth in |
|
96 | for group, auth in groups.items(): | |
97 | if user and user != auth.get(b'username', user): |
|
97 | if user and user != auth.get(b'username', user): | |
98 | # If a username was set in the URI, the entry username |
|
98 | # If a username was set in the URI, the entry username | |
99 | # must either match it or be unset |
|
99 | # must either match it or be unset |
@@ -2068,7 +2068,7 b' class localrepository(object):' | |||||
2068 | else: |
|
2068 | else: | |
2069 | tags = self._tagscache.tags |
|
2069 | tags = self._tagscache.tags | |
2070 | rev = self.changelog.rev |
|
2070 | rev = self.changelog.rev | |
2071 |
for k, v in |
|
2071 | for k, v in tags.items(): | |
2072 | try: |
|
2072 | try: | |
2073 | # ignore tags to unknown nodes |
|
2073 | # ignore tags to unknown nodes | |
2074 | rev(v) |
|
2074 | rev(v) | |
@@ -2103,13 +2103,12 b' class localrepository(object):' | |||||
2103 | # writing to the cache), but the rest of Mercurial wants them in |
|
2103 | # writing to the cache), but the rest of Mercurial wants them in | |
2104 | # local encoding. |
|
2104 | # local encoding. | |
2105 | tags = {} |
|
2105 | tags = {} | |
2106 |
for (name, (node, hist)) in |
|
2106 | for (name, (node, hist)) in alltags.items(): | |
2107 | if node != self.nullid: |
|
2107 | if node != self.nullid: | |
2108 | tags[encoding.tolocal(name)] = node |
|
2108 | tags[encoding.tolocal(name)] = node | |
2109 | tags[b'tip'] = self.changelog.tip() |
|
2109 | tags[b'tip'] = self.changelog.tip() | |
2110 | tagtypes = { |
|
2110 | tagtypes = { | |
2111 | encoding.tolocal(name): value |
|
2111 | encoding.tolocal(name): value for (name, value) in tagtypes.items() | |
2112 | for (name, value) in pycompat.iteritems(tagtypes) |
|
|||
2113 | } |
|
2112 | } | |
2114 | return (tags, tagtypes) |
|
2113 | return (tags, tagtypes) | |
2115 |
|
2114 | |||
@@ -2138,7 +2137,7 b' class localrepository(object):' | |||||
2138 | '''return the tags associated with a node''' |
|
2137 | '''return the tags associated with a node''' | |
2139 | if not self._tagscache.nodetagscache: |
|
2138 | if not self._tagscache.nodetagscache: | |
2140 | nodetagscache = {} |
|
2139 | nodetagscache = {} | |
2141 |
for t, n in |
|
2140 | for t, n in self._tagscache.tags.items(): | |
2142 | nodetagscache.setdefault(n, []).append(t) |
|
2141 | nodetagscache.setdefault(n, []).append(t) | |
2143 | for tags in pycompat.itervalues(nodetagscache): |
|
2142 | for tags in pycompat.itervalues(nodetagscache): | |
2144 | tags.sort() |
|
2143 | tags.sort() | |
@@ -2256,7 +2255,7 b' class localrepository(object):' | |||||
2256 | mf = matchmod.match(self.root, b'', [pat]) |
|
2255 | mf = matchmod.match(self.root, b'', [pat]) | |
2257 | fn = None |
|
2256 | fn = None | |
2258 | params = cmd |
|
2257 | params = cmd | |
2259 |
for name, filterfn in |
|
2258 | for name, filterfn in self._datafilters.items(): | |
2260 | if cmd.startswith(name): |
|
2259 | if cmd.startswith(name): | |
2261 | fn = filterfn |
|
2260 | fn = filterfn | |
2262 | params = cmd[len(name) :].lstrip() |
|
2261 | params = cmd[len(name) :].lstrip() |
@@ -327,7 +327,7 b' class changesetprinter(object):' | |||||
327 | if branch != b'default': |
|
327 | if branch != b'default': | |
328 | self.ui.write(columns[b'branch'] % branch, label=b'log.branch') |
|
328 | self.ui.write(columns[b'branch'] % branch, label=b'log.branch') | |
329 |
|
329 | |||
330 |
for nsname, ns in |
|
330 | for nsname, ns in self.repo.names.items(): | |
331 | # branches has special logic already handled above, so here we just |
|
331 | # branches has special logic already handled above, so here we just | |
332 | # skip it |
|
332 | # skip it | |
333 | if nsname == b'branches': |
|
333 | if nsname == b'branches': | |
@@ -991,7 +991,7 b' def _makerevset(repo, wopts, slowpath):' | |||||
991 | opts[b'_patslog'] = list(wopts.pats) |
|
991 | opts[b'_patslog'] = list(wopts.pats) | |
992 |
|
992 | |||
993 | expr = [] |
|
993 | expr = [] | |
994 |
for op, val in sorted( |
|
994 | for op, val in sorted(opts.items()): | |
995 | if not val: |
|
995 | if not val: | |
996 | continue |
|
996 | continue | |
997 | revop, listop = _opt2logrevset[op] |
|
997 | revop, listop = _opt2logrevset[op] |
@@ -10,7 +10,6 b'' | |||||
10 | from .node import hex |
|
10 | from .node import hex | |
11 |
|
11 | |||
12 | from . import ( |
|
12 | from . import ( | |
13 | pycompat, |
|
|||
14 | util, |
|
13 | util, | |
15 | vfs as vfsmod, |
|
14 | vfs as vfsmod, | |
16 | ) |
|
15 | ) | |
@@ -77,7 +76,7 b' def writeremotenamefile(repo, remotepath' | |||||
77 | if oldpath != remotepath: |
|
76 | if oldpath != remotepath: | |
78 | f.write(b'%s\0%s\0%s\n' % (node, oldpath, rname)) |
|
77 | f.write(b'%s\0%s\0%s\n' % (node, oldpath, rname)) | |
79 |
|
78 | |||
80 |
for name, node in sorted( |
|
79 | for name, node in sorted(names.items()): | |
81 | if nametype == b"branches": |
|
80 | if nametype == b"branches": | |
82 | for n in node: |
|
81 | for n in node: | |
83 | f.write(b'%s\0%s\0%s\n' % (n, remotepath, name)) |
|
82 | f.write(b'%s\0%s\0%s\n' % (n, remotepath, name)) | |
@@ -159,7 +158,7 b' def pullremotenames(localrepo, remoterep' | |||||
159 | with remoterepo.commandexecutor() as e: |
|
158 | with remoterepo.commandexecutor() as e: | |
160 | branchmap = e.callcommand(b'branchmap', {}).result() |
|
159 | branchmap = e.callcommand(b'branchmap', {}).result() | |
161 |
|
160 | |||
162 |
for branch, nodes in |
|
161 | for branch, nodes in branchmap.items(): | |
163 | bmap[branch] = [] |
|
162 | bmap[branch] = [] | |
164 | for node in nodes: |
|
163 | for node in nodes: | |
165 | if node in repo and not repo[node].obsolete(): |
|
164 | if node in repo and not repo[node].obsolete(): |
@@ -2,7 +2,6 b' import _lsprof' | |||||
2 | import sys |
|
2 | import sys | |
3 |
|
3 | |||
4 | from .pycompat import getattr |
|
4 | from .pycompat import getattr | |
5 | from . import pycompat |
|
|||
6 |
|
5 | |||
7 | Profiler = _lsprof.Profiler |
|
6 | Profiler = _lsprof.Profiler | |
8 |
|
7 | |||
@@ -124,7 +123,7 b' def label(code):' | |||||
124 | try: |
|
123 | try: | |
125 | mname = _fn2mod[code.co_filename] |
|
124 | mname = _fn2mod[code.co_filename] | |
126 | except KeyError: |
|
125 | except KeyError: | |
127 |
for k, v in list( |
|
126 | for k, v in list(sys.modules.items()): | |
128 | if v is None: |
|
127 | if v is None: | |
129 | continue |
|
128 | continue | |
130 | if not isinstance(getattr(v, '__file__', None), str): |
|
129 | if not isinstance(getattr(v, '__file__', None), str): |
@@ -867,11 +867,11 b' class treemanifest(object):' | |||||
867 | differs, load it in both |
|
867 | differs, load it in both | |
868 | """ |
|
868 | """ | |
869 | toloadlazy = [] |
|
869 | toloadlazy = [] | |
870 |
for d, v1 in |
|
870 | for d, v1 in t1._lazydirs.items(): | |
871 | v2 = t2._lazydirs.get(d) |
|
871 | v2 = t2._lazydirs.get(d) | |
872 | if not v2 or v2[0] != v1[0]: |
|
872 | if not v2 or v2[0] != v1[0]: | |
873 | toloadlazy.append(d) |
|
873 | toloadlazy.append(d) | |
874 |
for d, v1 in |
|
874 | for d, v1 in t2._lazydirs.items(): | |
875 | if d not in t1._lazydirs: |
|
875 | if d not in t1._lazydirs: | |
876 | toloadlazy.append(d) |
|
876 | toloadlazy.append(d) | |
877 |
|
877 | |||
@@ -953,7 +953,7 b' class treemanifest(object):' | |||||
953 | if p in self._files: |
|
953 | if p in self._files: | |
954 | yield self._subpath(p), n |
|
954 | yield self._subpath(p), n | |
955 | else: |
|
955 | else: | |
956 |
for f, sn in |
|
956 | for f, sn in n.items(): | |
957 | yield f, sn |
|
957 | yield f, sn | |
958 |
|
958 | |||
959 | iteritems = items |
|
959 | iteritems = items | |
@@ -1104,11 +1104,10 b' class treemanifest(object):' | |||||
1104 | def _copyfunc(s): |
|
1104 | def _copyfunc(s): | |
1105 | self._load() |
|
1105 | self._load() | |
1106 | s._lazydirs = { |
|
1106 | s._lazydirs = { | |
1107 | d: (n, r, True) |
|
1107 | d: (n, r, True) for d, (n, r, c) in self._lazydirs.items() | |
1108 | for d, (n, r, c) in pycompat.iteritems(self._lazydirs) |
|
|||
1109 | } |
|
1108 | } | |
1110 | sdirs = s._dirs |
|
1109 | sdirs = s._dirs | |
1111 |
for d, v in |
|
1110 | for d, v in self._dirs.items(): | |
1112 | sdirs[d] = v.copy() |
|
1111 | sdirs[d] = v.copy() | |
1113 | s._files = dict.copy(self._files) |
|
1112 | s._files = dict.copy(self._files) | |
1114 | s._flags = dict.copy(self._flags) |
|
1113 | s._flags = dict.copy(self._flags) | |
@@ -1136,7 +1135,7 b' class treemanifest(object):' | |||||
1136 | t1._load() |
|
1135 | t1._load() | |
1137 | t2._load() |
|
1136 | t2._load() | |
1138 | self._loaddifflazy(t1, t2) |
|
1137 | self._loaddifflazy(t1, t2) | |
1139 |
for d, m1 in |
|
1138 | for d, m1 in t1._dirs.items(): | |
1140 | if d in t2._dirs: |
|
1139 | if d in t2._dirs: | |
1141 | m2 = t2._dirs[d] |
|
1140 | m2 = t2._dirs[d] | |
1142 | _filesnotin(m1, m2) |
|
1141 | _filesnotin(m1, m2) | |
@@ -1249,7 +1248,7 b' class treemanifest(object):' | |||||
1249 | ret._flags[fn] = self._flags[fn] |
|
1248 | ret._flags[fn] = self._flags[fn] | |
1250 |
|
1249 | |||
1251 | visit = self._loadchildrensetlazy(visit) |
|
1250 | visit = self._loadchildrensetlazy(visit) | |
1252 |
for dir, subm in |
|
1251 | for dir, subm in self._dirs.items(): | |
1253 | if visit and dir[:-1] not in visit: |
|
1252 | if visit and dir[:-1] not in visit: | |
1254 | continue |
|
1253 | continue | |
1255 | m = subm._matches_inner(match) |
|
1254 | m = subm._matches_inner(match) | |
@@ -1294,15 +1293,15 b' class treemanifest(object):' | |||||
1294 | t2._load() |
|
1293 | t2._load() | |
1295 | self._loaddifflazy(t1, t2) |
|
1294 | self._loaddifflazy(t1, t2) | |
1296 |
|
1295 | |||
1297 |
for d, m1 in |
|
1296 | for d, m1 in t1._dirs.items(): | |
1298 | m2 = t2._dirs.get(d, emptytree) |
|
1297 | m2 = t2._dirs.get(d, emptytree) | |
1299 | stack.append((m1, m2)) |
|
1298 | stack.append((m1, m2)) | |
1300 |
|
1299 | |||
1301 |
for d, m2 in |
|
1300 | for d, m2 in t2._dirs.items(): | |
1302 | if d not in t1._dirs: |
|
1301 | if d not in t1._dirs: | |
1303 | stack.append((emptytree, m2)) |
|
1302 | stack.append((emptytree, m2)) | |
1304 |
|
1303 | |||
1305 |
for fn, n1 in |
|
1304 | for fn, n1 in t1._files.items(): | |
1306 | fl1 = t1._flags.get(fn, b'') |
|
1305 | fl1 = t1._flags.get(fn, b'') | |
1307 | n2 = t2._files.get(fn, None) |
|
1306 | n2 = t2._files.get(fn, None) | |
1308 | fl2 = t2._flags.get(fn, b'') |
|
1307 | fl2 = t2._flags.get(fn, b'') | |
@@ -1311,7 +1310,7 b' class treemanifest(object):' | |||||
1311 | elif clean: |
|
1310 | elif clean: | |
1312 | result[t1._subpath(fn)] = None |
|
1311 | result[t1._subpath(fn)] = None | |
1313 |
|
1312 | |||
1314 |
for fn, n2 in |
|
1313 | for fn, n2 in t2._files.items(): | |
1315 | if fn not in t1._files: |
|
1314 | if fn not in t1._files: | |
1316 | fl2 = t2._flags.get(fn, b'') |
|
1315 | fl2 = t2._flags.get(fn, b'') | |
1317 | result[t2._subpath(fn)] = ((None, b''), (n2, fl2)) |
|
1316 | result[t2._subpath(fn)] = ((None, b''), (n2, fl2)) | |
@@ -1361,9 +1360,7 b' class treemanifest(object):' | |||||
1361 | """ |
|
1360 | """ | |
1362 | self._load() |
|
1361 | self._load() | |
1363 | flags = self.flags |
|
1362 | flags = self.flags | |
1364 | lazydirs = [ |
|
1363 | lazydirs = [(d[:-1], v[0], b't') for d, v in self._lazydirs.items()] | |
1365 | (d[:-1], v[0], b't') for d, v in pycompat.iteritems(self._lazydirs) |
|
|||
1366 | ] |
|
|||
1367 | dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs] |
|
1364 | dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs] | |
1368 | files = [(f, self._files[f], flags(f)) for f in self._files] |
|
1365 | files = [(f, self._files[f], flags(f)) for f in self._files] | |
1369 | return _text(sorted(dirs + files + lazydirs)) |
|
1366 | return _text(sorted(dirs + files + lazydirs)) | |
@@ -1392,7 +1389,7 b' class treemanifest(object):' | |||||
1392 | visit = self._loadchildrensetlazy(visit) |
|
1389 | visit = self._loadchildrensetlazy(visit) | |
1393 | if visit == b'this' or visit == b'all': |
|
1390 | if visit == b'this' or visit == b'all': | |
1394 | visit = None |
|
1391 | visit = None | |
1395 |
for d, subm in |
|
1392 | for d, subm in self._dirs.items(): | |
1396 | if visit and d[:-1] not in visit: |
|
1393 | if visit and d[:-1] not in visit: | |
1397 | continue |
|
1394 | continue | |
1398 | subp1 = getnode(m1, d) |
|
1395 | subp1 = getnode(m1, d) | |
@@ -1415,7 +1412,7 b' class treemanifest(object):' | |||||
1415 | self._load() |
|
1412 | self._load() | |
1416 | # OPT: use visitchildrenset to avoid loading everything. |
|
1413 | # OPT: use visitchildrenset to avoid loading everything. | |
1417 | self._loadalllazy() |
|
1414 | self._loadalllazy() | |
1418 |
for d, subm in |
|
1415 | for d, subm in self._dirs.items(): | |
1419 | for subtree in subm.walksubtrees(matcher=matcher): |
|
1416 | for subtree in subm.walksubtrees(matcher=matcher): | |
1420 | yield subtree |
|
1417 | yield subtree | |
1421 |
|
1418 |
@@ -1638,7 +1638,7 b' def readpatternfile(filepath, warn, sour' | |||||
1638 | continue |
|
1638 | continue | |
1639 |
|
1639 | |||
1640 | linesyntax = syntax |
|
1640 | linesyntax = syntax | |
1641 |
for s, rels in |
|
1641 | for s, rels in syntaxes.items(): | |
1642 | if line.startswith(rels): |
|
1642 | if line.startswith(rels): | |
1643 | linesyntax = rels |
|
1643 | linesyntax = rels | |
1644 | line = line[len(rels) :] |
|
1644 | line = line[len(rels) :] |
@@ -643,10 +643,10 b' class mergeresult(object):' | |||||
643 |
|
643 | |||
644 | def filemap(self, sort=False): |
|
644 | def filemap(self, sort=False): | |
645 | if sort: |
|
645 | if sort: | |
646 |
for key, val in sorted( |
|
646 | for key, val in sorted(self._filemapping.items()): | |
647 | yield key, val |
|
647 | yield key, val | |
648 | else: |
|
648 | else: | |
649 |
for key, val in |
|
649 | for key, val in self._filemapping.items(): | |
650 | yield key, val |
|
650 | yield key, val | |
651 |
|
651 | |||
652 | def addcommitinfo(self, filename, key, value): |
|
652 | def addcommitinfo(self, filename, key, value): | |
@@ -671,15 +671,15 b' class mergeresult(object):' | |||||
671 | """returns a dictionary of actions to be perfomed with action as key |
|
671 | """returns a dictionary of actions to be perfomed with action as key | |
672 | and a list of files and related arguments as values""" |
|
672 | and a list of files and related arguments as values""" | |
673 | res = collections.defaultdict(list) |
|
673 | res = collections.defaultdict(list) | |
674 |
for a, d in |
|
674 | for a, d in self._actionmapping.items(): | |
675 |
for f, (args, msg) in |
|
675 | for f, (args, msg) in d.items(): | |
676 | res[a].append((f, args, msg)) |
|
676 | res[a].append((f, args, msg)) | |
677 | return res |
|
677 | return res | |
678 |
|
678 | |||
679 | def setactions(self, actions): |
|
679 | def setactions(self, actions): | |
680 | self._filemapping = actions |
|
680 | self._filemapping = actions | |
681 | self._actionmapping = collections.defaultdict(dict) |
|
681 | self._actionmapping = collections.defaultdict(dict) | |
682 |
for f, (act, data, msg) in |
|
682 | for f, (act, data, msg) in self._filemapping.items(): | |
683 | self._actionmapping[act][f] = data, msg |
|
683 | self._actionmapping[act][f] = data, msg | |
684 |
|
684 | |||
685 | def hasconflicts(self): |
|
685 | def hasconflicts(self): | |
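
mergeresult above keeps the same data indexed two ways: file -> (action, args, msg) and action -> {file: (args, msg)}, so either view is cheap. Rebuilding the second shape from the first, as setactions does:

import collections

def byaction(filemapping):
    res = collections.defaultdict(dict)
    for f, (act, data, msg) in filemapping.items():
        res[act][f] = (data, msg)
    return res

fm = {b'a.txt': (b'g', (b'flags',), b'remote is newer')}
assert byaction(fm) == {b'g': {b'a.txt': ((b'flags',), b'remote is newer')}}
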
@@ -786,7 +786,7 b' def manifestmerge(' | |||||
786 | relevantfiles = set(ma.diff(m2).keys()) |
|
786 | relevantfiles = set(ma.diff(m2).keys()) | |
787 |
|
787 | |||
788 | # For copied and moved files, we need to add the source file too. |
|
788 | # For copied and moved files, we need to add the source file too. | |
789 |
for copykey, copyvalue in |
|
789 | for copykey, copyvalue in branch_copies1.copy.items(): | |
790 | if copyvalue in relevantfiles: |
|
790 | if copyvalue in relevantfiles: | |
791 | relevantfiles.add(copykey) |
|
791 | relevantfiles.add(copykey) | |
792 | for movedirkey in branch_copies1.movewithdir: |
|
792 | for movedirkey in branch_copies1.movewithdir: | |
@@ -796,7 +796,7 b' def manifestmerge(' | |||||
796 |
|
796 | |||
797 | diff = m1.diff(m2, match=matcher) |
|
797 | diff = m1.diff(m2, match=matcher) | |
798 |
|
798 | |||
799 |
for f, ((n1, fl1), (n2, fl2)) in |
|
799 | for f, ((n1, fl1), (n2, fl2)) in diff.items(): | |
800 | if n1 and n2: # file exists on both local and remote side |
|
800 | if n1 and n2: # file exists on both local and remote side | |
801 | if f not in ma: |
|
801 | if f not in ma: | |
802 | # TODO: what if they're renamed from different sources? |
|
802 | # TODO: what if they're renamed from different sources? | |
@@ -1511,7 +1511,7 b' def applyupdates(' | |||||
1511 | ms = wctx.mergestate(clean=True) |
|
1511 | ms = wctx.mergestate(clean=True) | |
1512 | ms.start(wctx.p1().node(), mctx.node(), labels) |
|
1512 | ms.start(wctx.p1().node(), mctx.node(), labels) | |
1513 |
|
1513 | |||
1514 |
for f, op in |
|
1514 | for f, op in mresult.commitinfo.items(): | |
1515 | # the other side of filenode was chosen while merging, store this in |
|
1515 | # the other side of filenode was chosen while merging, store this in | |
1516 | # mergestate so that it can be reused on commit |
|
1516 | # mergestate so that it can be reused on commit | |
1517 | ms.addcommitinfo(f, op) |
|
1517 | ms.addcommitinfo(f, op) | |
@@ -2072,7 +2072,7 b' def _update(' | |||||
2072 | _checkcollision(repo, wc.manifest(), mresult) |
|
2072 | _checkcollision(repo, wc.manifest(), mresult) | |
2073 |
|
2073 | |||
2074 | # divergent renames |
|
2074 | # divergent renames | |
2075 |
for f, fl in sorted( |
|
2075 | for f, fl in sorted(mresult.diverge.items()): | |
2076 | repo.ui.warn( |
|
2076 | repo.ui.warn( | |
2077 | _( |
|
2077 | _( | |
2078 | b"note: possible conflict - %s was renamed " |
|
2078 | b"note: possible conflict - %s was renamed " | |
@@ -2084,7 +2084,7 b' def _update(' | |||||
2084 | repo.ui.warn(b" %s\n" % nf) |
|
2084 | repo.ui.warn(b" %s\n" % nf) | |
2085 |
|
2085 | |||
2086 | # rename and delete |
|
2086 | # rename and delete | |
2087 |
for f, fl in sorted( |
|
2087 | for f, fl in sorted(mresult.renamedelete.items()): | |
2088 | repo.ui.warn( |
|
2088 | repo.ui.warn( | |
2089 | _( |
|
2089 | _( | |
2090 | b"note: possible conflict - %s was deleted " |
|
2090 | b"note: possible conflict - %s was deleted " | |
@@ -2124,7 +2124,7 b' def _update(' | |||||
2124 |
|
2124 | |||
2125 | if updatedirstate: |
|
2125 | if updatedirstate: | |
2126 | if extraactions: |
|
2126 | if extraactions: | |
2127 |
for k, acts in |
|
2127 | for k, acts in extraactions.items(): | |
2128 | for a in acts: |
|
2128 | for a in acts: | |
2129 | mresult.addfile(a[0], k, *a[1:]) |
|
2129 | mresult.addfile(a[0], k, *a[1:]) | |
2130 | if k == mergestatemod.ACTION_GET and wantfiledata: |
|
2130 | if k == mergestatemod.ACTION_GET and wantfiledata: | |
@@ -2195,10 +2195,10 b' def _update(' | |||||
2195 | getfiledata = None |
|
2195 | getfiledata = None | |
2196 | else: |
|
2196 | else: | |
2197 | now_sec = now[0] |
|
2197 | now_sec = now[0] | |
2198 |
for f, m in |
|
2198 | for f, m in getfiledata.items(): | |
2199 | if m is not None and m[2][0] >= now_sec: |
|
2199 | if m is not None and m[2][0] >= now_sec: | |
2200 | ambiguous_mtime[f] = (m[0], m[1], None) |
|
2200 | ambiguous_mtime[f] = (m[0], m[1], None) | |
2201 |
for f, m in |
|
2201 | for f, m in ambiguous_mtime.items(): | |
2202 | getfiledata[f] = m |
|
2202 | getfiledata[f] = m | |
2203 |
|
2203 | |||
2204 | repo.setparents(fp1, fp2) |
|
2204 | repo.setparents(fp1, fp2) |
@@ -363,7 +363,7 b' class _mergestate_base(object):' | |||||
363 | def unresolved(self): |
|
363 | def unresolved(self): | |
364 | """Obtain the paths of unresolved files.""" |
|
364 | """Obtain the paths of unresolved files.""" | |
365 |
|
365 | |||
366 |
for f, entry in |
|
366 | for f, entry in self._state.items(): | |
367 | if entry[0] in ( |
|
367 | if entry[0] in ( | |
368 | MERGE_RECORD_UNRESOLVED, |
|
368 | MERGE_RECORD_UNRESOLVED, | |
369 | MERGE_RECORD_UNRESOLVED_PATH, |
|
369 | MERGE_RECORD_UNRESOLVED_PATH, | |
@@ -490,7 +490,7 b' class _mergestate_base(object):' | |||||
490 | ACTION_ADD_MODIFIED: [], |
|
490 | ACTION_ADD_MODIFIED: [], | |
491 | ACTION_GET: [], |
|
491 | ACTION_GET: [], | |
492 | } |
|
492 | } | |
493 |
for f, (r, action) in |
|
493 | for f, (r, action) in self._results.items(): | |
494 | if action is not None: |
|
494 | if action is not None: | |
495 | actions[action].append((f, None, b"merge result")) |
|
495 | actions[action].append((f, None, b"merge result")) | |
496 | return actions |
|
496 | return actions | |
@@ -690,7 +690,7 b' class mergestate(_mergestate_base):' | |||||
690 | # the type of state that is stored, and capital-letter records are used |
|
690 | # the type of state that is stored, and capital-letter records are used | |
691 | # to prevent older versions of Mercurial that do not support the feature |
|
691 | # to prevent older versions of Mercurial that do not support the feature | |
692 | # from loading them. |
|
692 | # from loading them. | |
693 | for filename, v in pycompat.iteritems(self._state): |
|
693 | for filename, v in self._state.items(): | |
694 | if v[0] in ( |
|
694 | if v[0] in ( | |
695 | MERGE_RECORD_UNRESOLVED_PATH, |
|
695 | MERGE_RECORD_UNRESOLVED_PATH, | |
696 | MERGE_RECORD_RESOLVED_PATH, |
|
696 | MERGE_RECORD_RESOLVED_PATH, | |
@@ -714,9 +714,9 b' class mergestate(_mergestate_base):' | |||||
714 | else: |
|
714 | else: | |
715 | # Normal files. These are stored in 'F' records. |
|
715 | # Normal files. These are stored in 'F' records. | |
716 | records.append((RECORD_MERGED, b'\0'.join([filename] + v))) |
|
716 | records.append((RECORD_MERGED, b'\0'.join([filename] + v))) | |
717 | for filename, extras in sorted(pycompat.iteritems(self._stateextras)): |
|
717 | for filename, extras in sorted(self._stateextras.items()): | |
718 | rawextras = b'\0'.join( |
|
718 | rawextras = b'\0'.join( | |
719 | b'%s\0%s' % (k, v) for k, v in pycompat.iteritems(extras) |
|
719 | b'%s\0%s' % (k, v) for k, v in extras.items() | |
720 | ) |
|
720 | ) | |
721 | records.append( |
|
721 | records.append( | |
722 | (RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras)) |
|
722 | (RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras)) |
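
The RECORD_FILE_VALUES encoding touched above is NUL-delimited: the filename, then alternating extra keys and values. A hypothetical round-trip (made-up filename and extras; sorted here only to make the example deterministic) showing why the flat layout decodes unambiguously as long as keys and values contain no NUL byte:

    extras = {b'ancestorlinknode': b'abc123', b'merged': b'yes'}
    rawextras = b'\0'.join(
        b'%s\0%s' % (k, v) for k, v in sorted(extras.items())
    )
    record = b'%s\0%s' % (b'some/file', rawextras)

    # Decoding: the first field is the filename, the rest alternate k, v.
    fields = record.split(b'\0')
    filename, flat = fields[0], fields[1:]
    assert dict(zip(flat[::2], flat[1::2])) == extras
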
@@ -1,6 +1,5 b'' | |||||
1 | from .i18n import _ |
|
1 | from .i18n import _ | |
2 | from . import ( |
|
2 | from . import ( | |
3 | pycompat, |
|
|||
4 | registrar, |
|
3 | registrar, | |
5 | templatekw, |
|
4 | templatekw, | |
6 | util, |
|
5 | util, | |
@@ -85,7 +84,7 b' class namespaces(object):' | |||||
85 | return self._names.get(namespace, default) |
|
84 | return self._names.get(namespace, default) | |
86 |
|
85 | |||
87 | def items(self): |
|
86 | def items(self): | |
88 | return pycompat.iteritems(self._names) |
|
87 | return self._names.items() | |
89 |
|
88 | |||
90 | iteritems = items |
|
89 | iteritems = items | |
91 |
|
90 | |||
@@ -118,7 +117,7 b' class namespaces(object):' | |||||
118 |
|
117 | |||
119 | Raises a KeyError if there is no such node. |
|
118 | Raises a KeyError if there is no such node. | |
120 | """ |
|
119 | """ | |
121 | for ns, v in pycompat.iteritems(self._names): |
|
120 | for ns, v in self._names.items(): | |
122 | n = v.singlenode(repo, name) |
|
121 | n = v.singlenode(repo, name) | |
123 | if n: |
|
122 | if n: | |
124 | return n |
|
123 | return n |
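
For context, the loop above implements first-match-wins resolution across registered namespaces. A simplified, self-contained model (namespace names and nodes are fabricated):

    namespaces = {
        b'bookmarks': lambda name: {b'stable': b'node1'}.get(name),
        b'tags': lambda name: {b'tip': b'node2'}.get(name),
    }

    def singlenode(name):
        # dicts preserve insertion order, so registration order decides
        # which namespace gets first crack at the name
        for ns, resolve in namespaces.items():
            n = resolve(name)
            if n:
                return n
        raise KeyError(name)

    assert singlenode(b'tip') == b'node2'
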
@@ -248,7 +248,7 b' def _fm0readmarkers(data, off, stop):' | |||||
248 | # if content cannot be translated to nodeid drop the data. |
|
248 | # if content cannot be translated to nodeid drop the data. | |
249 | parents = None |
|
249 | parents = None | |
250 |
|
250 | |||
251 | metadata = tuple(sorted(pycompat.iteritems(metadata))) |
|
251 | metadata = tuple(sorted(metadata.items())) | |
252 |
|
252 | |||
253 | yield (pre, sucs, flags, metadata, date, parents) |
|
253 | yield (pre, sucs, flags, metadata, date, parents) | |
254 |
|
254 | |||
@@ -278,7 +278,7 b' def _fm0encodemeta(meta):' | |||||
278 | """Return encoded metadata string to string mapping. |
|
278 | """Return encoded metadata string to string mapping. | |
279 |
|
279 | |||
280 | Assume no ':' in key and no '\0' in both key and value.""" |
|
280 | Assume no ':' in key and no '\0' in both key and value.""" | |
281 | for key, value in pycompat.iteritems(meta): |
|
281 | for key, value in meta.items(): | |
282 | if b':' in key or b'\0' in key: |
|
282 | if b':' in key or b'\0' in key: | |
283 | raise ValueError(b"':' and '\0' are forbidden in metadata key'") |
|
283 | raise ValueError(b"':' and '\0' are forbidden in metadata key'") | |
284 | if b'\0' in value: |
|
284 | if b'\0' in value: | |
@@ -652,7 +652,7 b' class obsstore(object):' | |||||
652 | 'in-marker cycle with %s' % pycompat.sysstr(hex(prec)) |
|
652 | 'in-marker cycle with %s' % pycompat.sysstr(hex(prec)) | |
653 | ) |
|
653 | ) | |
654 |
|
654 | |||
655 | metadata = tuple(sorted(pycompat.iteritems(metadata))) |
|
655 | metadata = tuple(sorted(metadata.items())) | |
656 | for k, v in metadata: |
|
656 | for k, v in metadata: | |
657 | try: |
|
657 | try: | |
658 | # might be better to reject non-ASCII keys |
|
658 | # might be better to reject non-ASCII keys |
@@ -18,7 +18,6 b' from . import (' | |||||
18 | encoding, |
|
18 | encoding, | |
19 | error, |
|
19 | error, | |
20 | phases, |
|
20 | phases, | |
21 | pycompat, |
|
|||
22 | util, |
|
21 | util, | |
23 | ) |
|
22 | ) | |
24 | from .utils import dateutil |
|
23 | from .utils import dateutil | |
@@ -997,7 +996,7 b' def divergentsets(repo, ctx):' | |||||
997 | base[tuple(nsuccset)] = n |
|
996 | base[tuple(nsuccset)] = n | |
998 | return [ |
|
997 | return [ | |
999 | {b'divergentnodes': divset, b'commonpredecessor': b} |
|
998 | {b'divergentnodes': divset, b'commonpredecessor': b} | |
1000 | for divset, b in pycompat.iteritems(base) |
|
999 | for divset, b in base.items() | |
1001 | ] |
|
1000 | ] | |
1002 |
|
1001 | |||
1003 |
|
1002 |
@@ -2643,11 +2643,7 b' def diffhunks(' | |||||
2643 | if copysourcematch: |
|
2643 | if copysourcematch: | |
2644 | # filter out copies where source side isn't inside the matcher |
|
2644 | # filter out copies where source side isn't inside the matcher | |
2645 | # (copies.pathcopies() already filtered out the destination) |
|
2645 | # (copies.pathcopies() already filtered out the destination) | |
2646 | copy = { |
|
2646 | copy = {dst: src for dst, src in copy.items() if copysourcematch(src)} | |
2647 | dst: src |
|
|||
2648 | for dst, src in pycompat.iteritems(copy) |
|
|||
2649 | if copysourcematch(src) |
|
|||
2650 | } |
|
|||
2651 |
|
2647 | |||
2652 | modifiedset = set(modified) |
|
2648 | modifiedset = set(modified) | |
2653 | addedset = set(added) |
|
2649 | addedset = set(added) |
@@ -324,7 +324,7 b' class dirs(object):' | |||||
324 | self._dirs = {} |
|
324 | self._dirs = {} | |
325 | addpath = self.addpath |
|
325 | addpath = self.addpath | |
326 | if isinstance(map, dict) and only_tracked: |
|
326 | if isinstance(map, dict) and only_tracked: | |
327 | for f, s in pycompat.iteritems(map): |
|
327 | for f, s in map.items(): | |
328 | if s.state != b'r': |
|
328 | if s.state != b'r': | |
329 | addpath(f) |
|
329 | addpath(f) | |
330 | elif only_tracked: |
|
330 | elif only_tracked: |
@@ -219,7 +219,7 b' def binaryencode(phasemapping):' | |||||
219 | The revision lists are encoded as (phase, root) pairs. |
|
219 | The revision lists are encoded as (phase, root) pairs. | |
220 | """ |
|
220 | """ | |
221 | binarydata = [] |
|
221 | binarydata = [] | |
222 | for phase, nodes in pycompat.iteritems(phasemapping): |
|
222 | for phase, nodes in phasemapping.items(): | |
223 | for head in nodes: |
|
223 | for head in nodes: | |
224 | binarydata.append(_fphasesentry.pack(phase, head)) |
|
224 | binarydata.append(_fphasesentry.pack(phase, head)) | |
225 | return b''.join(binarydata) |
|
225 | return b''.join(binarydata) | |
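
The pack() call above serializes each (phase, root) pair at a fixed width. Assuming the usual _fphasesentry layout of a big-endian int followed by a 20-byte node ('>i20s' is an assumption here), a round-trip looks like:

    import struct

    _fphasesentry = struct.Struct('>i20s')  # assumed format
    entry = _fphasesentry.pack(1, b'\x11' * 20)  # phase 1 = draft, fake node
    assert len(entry) == 24
    assert _fphasesentry.unpack(entry) == (1, b'\x11' * 20)
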
@@ -363,9 +363,7 b' class phasecache(object):' | |||||
363 | self.invalidate() |
|
363 | self.invalidate() | |
364 | self.loadphaserevs(repo) |
|
364 | self.loadphaserevs(repo) | |
365 | return any( |
|
365 | return any( | |
366 | revs |
|
366 | revs for phase, revs in self.phaseroots.items() if phase != public | |
367 | for phase, revs in pycompat.iteritems(self.phaseroots) |
|
|||
368 | if phase != public |
|
|||
369 | ) |
|
367 | ) | |
370 |
|
368 | |||
371 | def nonpublicphaseroots(self, repo): |
|
369 | def nonpublicphaseroots(self, repo): | |
@@ -383,7 +381,7 b' class phasecache(object):' | |||||
383 | return set().union( |
|
381 | return set().union( | |
384 | *[ |
|
382 | *[ | |
385 | revs |
|
383 | revs | |
386 | for phase, revs in pycompat.iteritems(self.phaseroots) |
|
384 | for phase, revs in self.phaseroots.items() | |
387 | if phase != public |
|
385 | if phase != public | |
388 | ] |
|
386 | ] | |
389 | ) |
|
387 | ) | |
@@ -528,7 +526,7 b' class phasecache(object):' | |||||
528 | f.close() |
|
526 | f.close() | |
529 |
|
527 | |||
530 | def _write(self, fp): |
|
528 | def _write(self, fp): | |
531 | for phase, roots in pycompat.iteritems(self.phaseroots): |
|
529 | for phase, roots in self.phaseroots.items(): | |
532 | for h in sorted(roots): |
|
530 | for h in sorted(roots): | |
533 | fp.write(b'%i %s\n' % (phase, hex(h))) |
|
531 | fp.write(b'%i %s\n' % (phase, hex(h))) | |
534 | self.dirty = False |
|
532 | self.dirty = False | |
@@ -612,7 +610,7 b' class phasecache(object):' | |||||
612 | def retractboundary(self, repo, tr, targetphase, nodes): |
|
610 | def retractboundary(self, repo, tr, targetphase, nodes): | |
613 | oldroots = { |
|
611 | oldroots = { | |
614 | phase: revs |
|
612 | phase: revs | |
615 | for phase, revs in pycompat.iteritems(self.phaseroots) |
|
613 | for phase, revs in self.phaseroots.items() | |
616 | if phase <= targetphase |
|
614 | if phase <= targetphase | |
617 | } |
|
615 | } | |
618 | if tr is None: |
|
616 | if tr is None: | |
@@ -690,7 +688,7 b' class phasecache(object):' | |||||
690 | """ |
|
688 | """ | |
691 | filtered = False |
|
689 | filtered = False | |
692 | has_node = repo.changelog.index.has_node # to filter unknown nodes |
|
690 | has_node = repo.changelog.index.has_node # to filter unknown nodes | |
693 | for phase, nodes in pycompat.iteritems(self.phaseroots): |
|
691 | for phase, nodes in self.phaseroots.items(): | |
694 | missing = sorted(node for node in nodes if not has_node(node)) |
|
692 | missing = sorted(node for node in nodes if not has_node(node)) | |
695 | if missing: |
|
693 | if missing: | |
696 | for mnode in missing: |
|
694 | for mnode in missing: | |
@@ -854,7 +852,7 b' def analyzeremotephases(repo, subset, ro' | |||||
854 | # build list from dictionary |
|
852 | # build list from dictionary | |
855 | draftroots = [] |
|
853 | draftroots = [] | |
856 | has_node = repo.changelog.index.has_node # to filter unknown nodes |
|
854 | has_node = repo.changelog.index.has_node # to filter unknown nodes | |
857 | for nhex, phase in pycompat.iteritems(roots): |
|
855 | for nhex, phase in roots.items(): | |
858 | if nhex == b'publishing': # ignore data related to publish option |
|
856 | if nhex == b'publishing': # ignore data related to publish option | |
859 | continue |
|
857 | continue | |
860 | node = bin(nhex) |
|
858 | node = bin(nhex) |
@@ -18,7 +18,6 b' from ..node import (' | |||||
18 | from ..thirdparty import attr |
|
18 | from ..thirdparty import attr | |
19 | from .. import ( |
|
19 | from .. import ( | |
20 | error, |
|
20 | error, | |
21 | pycompat, |
|
|||
22 | revlogutils, |
|
21 | revlogutils, | |
23 | util, |
|
22 | util, | |
24 | ) |
|
23 | ) | |
@@ -959,7 +958,7 b' def pack_dirstate(dmap, copymap, pl):' | |||||
959 | cs = stringio() |
|
958 | cs = stringio() | |
960 | write = cs.write |
|
959 | write = cs.write | |
961 | write(b"".join(pl)) |
|
960 | write(b"".join(pl)) | |
962 | for f, e in pycompat.iteritems(dmap): |
|
961 | for f, e in dmap.items(): | |
963 | if f in copymap: |
|
962 | if f in copymap: | |
964 | f = b"%s\0%s" % (f, copymap[f]) |
|
963 | f = b"%s\0%s" % (f, copymap[f]) | |
965 | e = _pack( |
|
964 | e = _pack( |
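
In pack_dirstate above, a copy source is folded into the filename field with a NUL separator before the entry is packed. A sketch with made-up paths:

    copymap = {b'new/name.py': b'old/name.py'}
    f = b'new/name.py'
    if f in copymap:
        f = b'%s\0%s' % (f, copymap[f])

    # the reader splits the field back apart on the first NUL
    name, _sep, source = f.partition(b'\0')
    assert (name, source) == (b'new/name.py', b'old/name.py')
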
@@ -1305,7 +1305,7 b' class revlog(object):' | |||||
1305 | # But, obviously its parents aren't. |
|
1305 | # But, obviously its parents aren't. | |
1306 | for p in self.parents(n): |
|
1306 | for p in self.parents(n): | |
1307 | heads.pop(p, None) |
|
1307 | heads.pop(p, None) | |
1308 | heads = [head for head, flag in pycompat.iteritems(heads) if flag] |
|
1308 | heads = [head for head, flag in heads.items() if flag] | |
1309 | roots = list(roots) |
|
1309 | roots = list(roots) | |
1310 | assert orderedout |
|
1310 | assert orderedout | |
1311 | assert roots |
|
1311 | assert roots |
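
The comprehension above keeps only the survivors: heads maps each candidate node to a boolean flag, and a node whose flag was cleared along the way is not a head. Reduced to its essence (fabricated nodes):

    heads = {b'n1': False, b'n2': True, b'n3': True}
    heads = [head for head, flag in heads.items() if flag]
    assert sorted(heads) == [b'n2', b'n3']
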
@@ -594,7 +594,7 b' def bookmark(repo, subset, x):' | |||||
594 | bms.add(repo[bmrev].rev()) |
|
594 | bms.add(repo[bmrev].rev()) | |
595 | else: |
|
595 | else: | |
596 | matchrevs = set() |
|
596 | matchrevs = set() | |
597 | for name, bmrev in pycompat.iteritems(repo._bookmarks): |
|
597 | for name, bmrev in repo._bookmarks.items(): | |
598 | if matcher(name): |
|
598 | if matcher(name): | |
599 | matchrevs.add(bmrev) |
|
599 | matchrevs.add(bmrev) | |
600 | for bmrev in matchrevs: |
|
600 | for bmrev in matchrevs: | |
@@ -1706,7 +1706,7 b' def named(repo, subset, x):' | |||||
1706 | ) |
|
1706 | ) | |
1707 | namespaces.add(repo.names[pattern]) |
|
1707 | namespaces.add(repo.names[pattern]) | |
1708 | else: |
|
1708 | else: | |
1709 | for name, ns in pycompat.iteritems(repo.names): |
|
1709 | for name, ns in repo.names.items(): | |
1710 | if matcher(name): |
|
1710 | if matcher(name): | |
1711 | namespaces.add(ns) |
|
1711 | namespaces.add(ns) | |
1712 |
|
1712 | |||
@@ -2803,7 +2803,7 b' def makematcher(tree):' | |||||
2803 |
|
2803 | |||
2804 | def loadpredicate(ui, extname, registrarobj): |
|
2804 | def loadpredicate(ui, extname, registrarobj): | |
2805 | """Load revset predicates from specified registrarobj""" |
|
2805 | """Load revset predicates from specified registrarobj""" | |
2806 | for name, func in pycompat.iteritems(registrarobj._table): |
|
2806 | for name, func in registrarobj._table.items(): | |
2807 | symbols[name] = func |
|
2807 | symbols[name] = func | |
2808 | if func._safe: |
|
2808 | if func._safe: | |
2809 | safesymbols.add(name) |
|
2809 | safesymbols.add(name) |
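
loadpredicate and the template-side loaders further down all share one shape: copy a registrar table into the module-level lookup tables, with a side set for entries flagged safe. A toy version (class and table contents invented):

    symbols, safesymbols = {}, set()

    class _predicate:
        _safe = True

    _table = {b'bookmark': _predicate}

    for name, func in _table.items():
        symbols[name] = func
        if func._safe:
            safesymbols.add(name)

    assert b'bookmark' in safesymbols
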
@@ -612,7 +612,7 b' def expandaliases(tree, aliases, warn=No' | |||||
612 | tree = _aliasrules.expand(aliases, tree) |
|
612 | tree = _aliasrules.expand(aliases, tree) | |
613 | # warn about problematic (but not referred) aliases |
|
613 | # warn about problematic (but not referred) aliases | |
614 | if warn is not None: |
|
614 | if warn is not None: | |
615 | for name, alias in sorted(pycompat.iteritems(aliases)): |
|
615 | for name, alias in sorted(aliases.items()): | |
616 | if alias.error and not alias.warned: |
|
616 | if alias.error and not alias.warned: | |
617 | warn(_(b'warning: %s\n') % (alias.error)) |
|
617 | warn(_(b'warning: %s\n') % (alias.error)) | |
618 | alias.warned = True |
|
618 | alias.warned = True |
@@ -108,7 +108,7 b' def itersubrepos(ctx1, ctx2):' | |||||
108 | del subpaths[subpath] |
|
108 | del subpaths[subpath] | |
109 | missing.add(subpath) |
|
109 | missing.add(subpath) | |
110 |
|
110 | |||
111 | for subpath, ctx in sorted(pycompat.iteritems(subpaths)): |
|
111 | for subpath, ctx in sorted(subpaths.items()): | |
112 | yield subpath, ctx.sub(subpath) |
|
112 | yield subpath, ctx.sub(subpath) | |
113 |
|
113 | |||
114 | # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way, |
|
114 | # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way, | |
@@ -1336,7 +1336,7 b' def _interestingfiles(repo, matcher):' | |||||
1336 | ignored=False, |
|
1336 | ignored=False, | |
1337 | full=False, |
|
1337 | full=False, | |
1338 | ) |
|
1338 | ) | |
1339 | for abs, st in pycompat.iteritems(walkresults): |
|
1339 | for abs, st in walkresults.items(): | |
1340 | entry = dirstate.get_entry(abs) |
|
1340 | entry = dirstate.get_entry(abs) | |
1341 | if (not entry.any_tracked) and audit_path.check(abs): |
|
1341 | if (not entry.any_tracked) and audit_path.check(abs): | |
1342 | unknown.append(abs) |
|
1342 | unknown.append(abs) | |
@@ -1383,7 +1383,7 b' def _markchanges(repo, unknown, deleted,' | |||||
1383 | with repo.wlock(): |
|
1383 | with repo.wlock(): | |
1384 | wctx.forget(deleted) |
|
1384 | wctx.forget(deleted) | |
1385 | wctx.add(unknown) |
|
1385 | wctx.add(unknown) | |
1386 | for new, old in pycompat.iteritems(renames): |
|
1386 | for new, old in renames.items(): | |
1387 | wctx.copy(old, new) |
|
1387 | wctx.copy(old, new) | |
1388 |
|
1388 | |||
1389 |
|
1389 | |||
@@ -1509,12 +1509,9 b' def movedirstate(repo, newctx, match=Non' | |||||
1509 | # Merge old parent and old working dir copies |
|
1509 | # Merge old parent and old working dir copies | |
1510 | oldcopies = copiesmod.pathcopies(newctx, oldctx, match) |
|
1510 | oldcopies = copiesmod.pathcopies(newctx, oldctx, match) | |
1511 | oldcopies.update(copies) |
|
1511 | oldcopies.update(copies) | |
1512 | copies = { |
|
1512 | copies = {dst: oldcopies.get(src, src) for dst, src in oldcopies.items()} | |
1513 | dst: oldcopies.get(src, src) |
|
|||
1514 | for dst, src in pycompat.iteritems(oldcopies) |
|
|||
1515 | } |
|
|||
1516 | # Adjust the dirstate copies |
|
1513 | # Adjust the dirstate copies | |
1517 | for dst, src in pycompat.iteritems(copies): |
|
1514 | for dst, src in copies.items(): | |
1518 | if src not in newctx or dst in newctx or not ds.get_entry(dst).added: |
|
1515 | if src not in newctx or dst in newctx or not ds.get_entry(dst).added: | |
1519 | src = None |
|
1516 | src = None | |
1520 | ds.copy(src, dst) |
|
1517 | ds.copy(src, dst) |
@@ -9,7 +9,6 b'' | |||||
9 | from .i18n import _ |
|
9 | from .i18n import _ | |
10 | from . import ( |
|
10 | from . import ( | |
11 | mdiff, |
|
11 | mdiff, | |
12 | pycompat, |
|
|||
13 | ) |
|
12 | ) | |
14 |
|
13 | |||
15 |
|
14 | |||
@@ -97,7 +96,7 b' def _findsimilarmatches(repo, added, rem' | |||||
97 | copies[a] = (r, myscore) |
|
96 | copies[a] = (r, myscore) | |
98 | progress.complete() |
|
97 | progress.complete() | |
99 |
|
98 | |||
100 | for dest, v in pycompat.iteritems(copies): |
|
99 | for dest, v in copies.items(): | |
101 | source, bscore = v |
|
100 | source, bscore = v | |
102 | yield source, dest, bscore |
|
101 | yield source, dest, bscore | |
103 |
|
102 |
@@ -554,7 +554,7 b' def refreshwdir(repo, origstatus, origsp' | |||||
554 | ) |
|
554 | ) | |
555 |
|
555 | |||
556 | # Check for files that were only in the dirstate. |
|
556 | # Check for files that were only in the dirstate. | |
557 | for file, state in pycompat.iteritems(dirstate): |
|
557 | for file, state in dirstate.items(): | |
558 | if not file in files: |
|
558 | if not file in files: | |
559 | old = origsparsematch(file) |
|
559 | old = origsparsematch(file) | |
560 | new = sparsematch(file) |
|
560 | new = sparsematch(file) |
@@ -472,10 +472,10 b' class sshv1peer(wireprotov1peer.wirepeer' | |||||
472 | else: |
|
472 | else: | |
473 | wireargs[k] = args[k] |
|
473 | wireargs[k] = args[k] | |
474 | del args[k] |
|
474 | del args[k] | |
475 | for k, v in sorted(pycompat.iteritems(wireargs)): |
|
475 | for k, v in sorted(wireargs.items()): | |
476 | self._pipeo.write(b"%s %d\n" % (k, len(v))) |
|
476 | self._pipeo.write(b"%s %d\n" % (k, len(v))) | |
477 | if isinstance(v, dict): |
|
477 | if isinstance(v, dict): | |
478 | for dk, dv in pycompat.iteritems(v): |
|
478 | for dk, dv in v.items(): | |
479 | self._pipeo.write(b"%s %d\n" % (dk, len(dv))) |
|
479 | self._pipeo.write(b"%s %d\n" % (dk, len(dv))) | |
480 | self._pipeo.write(dv) |
|
480 | self._pipeo.write(dv) | |
481 | else: |
|
481 | else: |
@@ -573,7 +573,7 b' def display_by_method(data, fp):' | |||||
573 |
|
573 | |||
574 | # compute sums for each function |
|
574 | # compute sums for each function | |
575 | functiondata = [] |
|
575 | functiondata = [] | |
576 | for fname, sitestats in pycompat.iteritems(grouped): |
|
576 | for fname, sitestats in grouped.items(): | |
577 | total_cum_sec = 0 |
|
577 | total_cum_sec = 0 | |
578 | total_self_sec = 0 |
|
578 | total_self_sec = 0 | |
579 | total_percent = 0 |
|
579 | total_percent = 0 | |
@@ -652,7 +652,7 b' def display_about_method(data, fp, funct' | |||||
652 | else: |
|
652 | else: | |
653 | children[site] = 1 |
|
653 | children[site] = 1 | |
654 |
|
654 | |||
655 | parents = [(parent, count) for parent, count in pycompat.iteritems(parents)] |
|
655 | parents = [(parent, count) for parent, count in parents.items()] | |
656 | parents.sort(reverse=True, key=lambda x: x[1]) |
|
656 | parents.sort(reverse=True, key=lambda x: x[1]) | |
657 | for parent, count in parents: |
|
657 | for parent, count in parents: | |
658 | fp.write( |
|
658 | fp.write( | |
@@ -696,7 +696,7 b' def display_about_method(data, fp, funct' | |||||
696 | ) |
|
696 | ) | |
697 | ) |
|
697 | ) | |
698 |
|
698 | |||
699 | children = [(child, count) for child, count in pycompat.iteritems(children)] |
|
699 | children = [(child, count) for child, count in children.items()] | |
700 | children.sort(reverse=True, key=lambda x: x[1]) |
|
700 | children.sort(reverse=True, key=lambda x: x[1]) | |
701 | for child, count in children: |
|
701 | for child, count in children: | |
702 | fp.write( |
|
702 | fp.write( | |
@@ -827,7 +827,7 b' def write_to_flame(data, fp, scriptpath=' | |||||
827 | fd, path = pycompat.mkstemp() |
|
827 | fd, path = pycompat.mkstemp() | |
828 |
|
828 | |||
829 | with open(path, b"w+") as file: |
|
829 | with open(path, b"w+") as file: | |
830 | for line, count in pycompat.iteritems(lines): |
|
830 | for line, count in lines.items(): | |
831 | file.write(b"%s %d\n" % (line, count)) |
|
831 | file.write(b"%s %d\n" % (line, count)) | |
832 |
|
832 | |||
833 | if outputfile is None: |
|
833 | if outputfile is None: |
@@ -144,7 +144,7 b' def _buildencodefun():' | |||||
144 | cmap[xchr(x)] = e + xchr(x).lower() |
|
144 | cmap[xchr(x)] = e + xchr(x).lower() | |
145 |
|
145 | |||
146 | dmap = {} |
|
146 | dmap = {} | |
147 | for k, v in pycompat.iteritems(cmap): |
|
147 | for k, v in cmap.items(): | |
148 | dmap[v] = k |
|
148 | dmap[v] = k | |
149 |
|
149 | |||
150 | def decode(s): |
|
150 | def decode(s): |
@@ -193,7 +193,7 b' def debugstrip(ui, repo, *revs, **opts):' | |||||
193 | # a revision we have to only delete the bookmark and not strip |
|
193 | # a revision we have to only delete the bookmark and not strip | |
194 | # anything. revsets cannot detect that case. |
|
194 | # anything. revsets cannot detect that case. | |
195 | nodetobookmarks = {} |
|
195 | nodetobookmarks = {} | |
196 | for mark, node in pycompat.iteritems(repomarks): |
|
196 | for mark, node in repomarks.items(): | |
197 | nodetobookmarks.setdefault(node, []).append(mark) |
|
197 | nodetobookmarks.setdefault(node, []).append(mark) | |
198 | for marks in nodetobookmarks.values(): |
|
198 | for marks in nodetobookmarks.values(): | |
199 | if bookmarks.issuperset(marks): |
|
199 | if bookmarks.issuperset(marks): |
@@ -1770,7 +1770,7 b' class gitsubrepo(abstractsubrepo):' | |||||
1770 | for b in rev2branch[self._state[1]]: |
|
1770 | for b in rev2branch[self._state[1]]: | |
1771 | if b.startswith(b'refs/remotes/origin/'): |
|
1771 | if b.startswith(b'refs/remotes/origin/'): | |
1772 | return True |
|
1772 | return True | |
1773 | for b, revision in pycompat.iteritems(branch2rev): |
|
1773 | for b, revision in branch2rev.items(): | |
1774 | if b.startswith(b'refs/remotes/origin/'): |
|
1774 | if b.startswith(b'refs/remotes/origin/'): | |
1775 | if self._gitisancestor(self._state[1], revision): |
|
1775 | if self._gitisancestor(self._state[1], revision): | |
1776 | return True |
|
1776 | return True |
@@ -190,7 +190,7 b' def submerge(repo, wctx, mctx, actx, ove' | |||||
190 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) |
|
190 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) | |
191 |
|
191 | |||
192 | promptssrc = filemerge.partextras(labels) |
|
192 | promptssrc = filemerge.partextras(labels) | |
193 | for s, l in sorted(pycompat.iteritems(s1)): |
|
193 | for s, l in sorted(s1.items()): | |
194 | a = sa.get(s, nullstate) |
|
194 | a = sa.get(s, nullstate) | |
195 | ld = l # local state with possible dirty flag for compares |
|
195 | ld = l # local state with possible dirty flag for compares | |
196 | if wctx.sub(s).dirty(): |
|
196 | if wctx.sub(s).dirty(): |
@@ -25,7 +25,6 b' from . import (' | |||||
25 | encoding, |
|
25 | encoding, | |
26 | error, |
|
26 | error, | |
27 | match as matchmod, |
|
27 | match as matchmod, | |
28 | pycompat, |
|
|||
29 | scmutil, |
|
28 | scmutil, | |
30 | util, |
|
29 | util, | |
31 | ) |
|
30 | ) | |
@@ -354,7 +353,7 b' def _updatetags(filetags, alltags, tagty' | |||||
354 | if tagtype is None: |
|
353 | if tagtype is None: | |
355 | assert tagtypes is None |
|
354 | assert tagtypes is None | |
356 |
|
355 | |||
357 | for name, nodehist in pycompat.iteritems(filetags): |
|
356 | for name, nodehist in filetags.items(): | |
358 | if name not in alltags: |
|
357 | if name not in alltags: | |
359 | alltags[name] = nodehist |
|
358 | alltags[name] = nodehist | |
360 | if tagtype is not None: |
|
359 | if tagtype is not None: | |
@@ -507,7 +506,7 b' def _getfnodes(ui, repo, nodes):' | |||||
507 |
|
506 | |||
508 | if unknown_entries: |
|
507 | if unknown_entries: | |
509 | fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) |
|
508 | fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) | |
510 | for node, fnode in pycompat.iteritems(fixed_nodemap): |
|
509 | for node, fnode in fixed_nodemap.items(): | |
511 | if fnode != repo.nullid: |
|
510 | if fnode != repo.nullid: | |
512 | cachefnode[node] = fnode |
|
511 | cachefnode[node] = fnode | |
513 |
|
512 | |||
@@ -549,7 +548,7 b' def _writetagcache(ui, repo, valid, cach' | |||||
549 | # we keep them in UTF-8 throughout this module. If we converted |
|
548 | # we keep them in UTF-8 throughout this module. If we converted | |
550 | # them local encoding on input, we would lose info writing them to |
|
549 | # them local encoding on input, we would lose info writing them to | |
551 | # the cache. |
|
550 | # the cache. | |
552 | for (name, (node, hist)) in sorted(pycompat.iteritems(cachetags)): |
|
551 | for (name, (node, hist)) in sorted(cachetags.items()): | |
553 | for n in hist: |
|
552 | for n in hist: | |
554 | cachefile.write(b"%s %s\n" % (hex(n), name)) |
|
553 | cachefile.write(b"%s %s\n" % (hex(n), name)) | |
555 | cachefile.write(b"%s %s\n" % (hex(node), name)) |
|
554 | cachefile.write(b"%s %s\n" % (hex(node), name)) |
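
The cache file written above is line-oriented: 'hex-node SP tag-name', with a tag's historical nodes emitted before its current node. A sketch with fabricated values:

    from binascii import hexlify

    cachetags = {b'v1.0': (b'\xaa' * 20, [b'\xbb' * 20])}
    lines = []
    for name, (node, hist) in sorted(cachetags.items()):
        for n in hist:
            lines.append(b'%s %s\n' % (hexlify(n), name))
        lines.append(b'%s %s\n' % (hexlify(node), name))

    # history first, current node last
    assert lines[-1] == b'%s %s\n' % (hexlify(b'\xaa' * 20), b'v1.0')
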
@@ -346,7 +346,7 b' def json(obj, paranoid=True):' | |||||
346 | out = [ |
|
346 | out = [ | |
347 | b'"%s": %s' |
|
347 | b'"%s": %s' | |
348 | % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid)) |
|
348 | % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid)) | |
349 | for k, v in sorted(pycompat.iteritems(obj)) |
|
349 | for k, v in sorted(obj.items()) | |
350 | ] |
|
350 | ] | |
351 | return b'{' + b', '.join(out) + b'}' |
|
351 | return b'{' + b', '.join(out) + b'}' | |
352 | elif util.safehasattr(obj, b'__iter__'): |
|
352 | elif util.safehasattr(obj, b'__iter__'): | |
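
Sorting before serializing, as the dict branch above does, makes the JSON output deterministic regardless of insertion order. A minimal model (plain ints stand in for the recursive json() calls):

    obj = {b'b': 1, b'a': 2}
    out = [b'"%s": %d' % (k, v) for k, v in sorted(obj.items())]
    assert b'{' + b', '.join(out) + b'}' == b'{"a": 2, "b": 1}'
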
@@ -548,7 +548,7 b' def websub(text, websubtable):' | |||||
548 |
|
548 | |||
549 | def loadfilter(ui, extname, registrarobj): |
|
549 | def loadfilter(ui, extname, registrarobj): | |
550 | """Load template filter from specified registrarobj""" |
|
550 | """Load template filter from specified registrarobj""" | |
551 | for name, func in pycompat.iteritems(registrarobj._table): |
|
551 | for name, func in registrarobj._table.items(): | |
552 | filters[name] = func |
|
552 | filters[name] = func | |
553 |
|
553 | |||
554 |
|
554 |
@@ -910,7 +910,7 b' def word(context, mapping, args):' | |||||
910 |
|
910 | |||
911 | def loadfunction(ui, extname, registrarobj): |
|
911 | def loadfunction(ui, extname, registrarobj): | |
912 | """Load template function from specified registrarobj""" |
|
912 | """Load template function from specified registrarobj""" | |
913 | for name, func in pycompat.iteritems(registrarobj._table): |
|
913 | for name, func in registrarobj._table.items(): | |
914 | funcs[name] = func |
|
914 | funcs[name] = func | |
915 |
|
915 | |||
916 |
|
916 |
@@ -602,7 +602,7 b' def shownamespaces(context, mapping):' | |||||
602 | # 'name' for iterating over namespaces, templatename for local reference |
|
602 | # 'name' for iterating over namespaces, templatename for local reference | |
603 | return lambda v: {b'name': v, ns.templatename: v} |
|
603 | return lambda v: {b'name': v, ns.templatename: v} | |
604 |
|
604 | |||
605 | for k, ns in pycompat.iteritems(repo.names): |
|
605 | for k, ns in repo.names.items(): | |
606 | names = ns.names(repo, ctx.node()) |
|
606 | names = ns.names(repo, ctx.node()) | |
607 | f = _showcompatlist(context, mapping, b'name', names) |
|
607 | f = _showcompatlist(context, mapping, b'name', names) | |
608 | namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity) |
|
608 | namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity) | |
@@ -690,7 +690,7 b' def showpeerurls(context, mapping):' | |||||
690 | d.update(sub_opts) |
|
690 | d.update(sub_opts) | |
691 | path_dict = util.sortdict() |
|
691 | path_dict = util.sortdict() | |
692 | for p in ps: |
|
692 | for p in ps: | |
693 | sub_opts = util.sortdict(sorted(pycompat.iteritems(p.suboptions))) |
|
693 | sub_opts = util.sortdict(sorted(p.suboptions.items())) | |
694 | path_dict[b'url'] = p.rawloc |
|
694 | path_dict[b'url'] = p.rawloc | |
695 | path_dict.update(sub_opts) |
|
695 | path_dict.update(sub_opts) | |
696 | d[b'urls'] = [path_dict] |
|
696 | d[b'urls'] = [path_dict] | |
@@ -1023,7 +1023,7 b' def showwhyunstable(context, mapping):' | |||||
1023 |
|
1023 | |||
1024 | def loadkeyword(ui, extname, registrarobj): |
|
1024 | def loadkeyword(ui, extname, registrarobj): | |
1025 | """Load template keyword from specified registrarobj""" |
|
1025 | """Load template keyword from specified registrarobj""" | |
1026 | for name, func in pycompat.iteritems(registrarobj._table): |
|
1026 | for name, func in registrarobj._table.items(): | |
1027 | keywords[name] = func |
|
1027 | keywords[name] = func | |
1028 |
|
1028 | |||
1029 |
|
1029 |
@@ -530,8 +530,7 b' def _buildfuncargs(exp, context, curmeth' | |||||
530 |
|
530 | |||
531 | def compiledict(xs): |
|
531 | def compiledict(xs): | |
532 | return util.sortdict( |
|
532 | return util.sortdict( | |
533 | (k, compileexp(x, context, curmethods)) |
|
533 | (k, compileexp(x, context, curmethods)) for k, x in xs.items() | |
534 | for k, x in pycompat.iteritems(xs) |
|
|||
535 | ) |
|
534 | ) | |
536 |
|
535 | |||
537 | def compilelist(xs): |
|
536 | def compilelist(xs): | |
@@ -708,7 +707,7 b' class engine(object):' | |||||
708 | newres = self._resources.availablekeys(newmapping) |
|
707 | newres = self._resources.availablekeys(newmapping) | |
709 | mapping = { |
|
708 | mapping = { | |
710 | k: v |
|
709 | k: v | |
711 | for k, v in pycompat.iteritems(origmapping) |
|
710 | for k, v in origmapping.items() | |
712 | if ( |
|
711 | if ( | |
713 | k in knownres # not a symbol per self.symbol() |
|
712 | k in knownres # not a symbol per self.symbol() | |
714 | or newres.isdisjoint(self._defaultrequires(k)) |
|
713 | or newres.isdisjoint(self._defaultrequires(k)) |
@@ -310,7 +310,7 b' class hybrid(wrapped):' | |||||
310 | if util.safehasattr(self._values, b'get'): |
|
310 | if util.safehasattr(self._values, b'get'): | |
311 | values = { |
|
311 | values = { | |
312 | k: v |
|
312 | k: v | |
313 | for k, v in pycompat.iteritems(self._values) |
|
313 | for k, v in self._values.items() | |
314 | if select(self._wrapvalue(k, v)) |
|
314 | if select(self._wrapvalue(k, v)) | |
315 | } |
|
315 | } | |
316 | else: |
|
316 | else: | |
@@ -342,10 +342,7 b' class hybrid(wrapped):' | |||||
342 | # TODO: make it non-recursive for trivial lists/dicts |
|
342 | # TODO: make it non-recursive for trivial lists/dicts | |
343 | xs = self._values |
|
343 | xs = self._values | |
344 | if util.safehasattr(xs, b'get'): |
|
344 | if util.safehasattr(xs, b'get'): | |
345 | return { |
|
345 | return {k: unwrapvalue(context, mapping, v) for k, v in xs.items()} | |
346 | k: unwrapvalue(context, mapping, v) |
|
|||
347 | for k, v in pycompat.iteritems(xs) |
|
|||
348 | } |
|
|||
349 | return [unwrapvalue(context, mapping, x) for x in xs] |
|
346 | return [unwrapvalue(context, mapping, x) for x in xs] | |
350 |
|
347 | |||
351 |
|
348 | |||
@@ -537,7 +534,7 b' class _mappingsequence(wrapped):' | |||||
537 | items.append( |
|
534 | items.append( | |
538 | { |
|
535 | { | |
539 | k: unwrapvalue(context, lm, v) |
|
536 | k: unwrapvalue(context, lm, v) | |
540 | for k, v in pycompat.iteritems(nm) |
|
537 | for k, v in nm.items() | |
541 | if k not in knownres |
|
538 | if k not in knownres | |
542 | } |
|
539 | } | |
543 | ) |
|
540 | ) | |
@@ -715,7 +712,7 b' def compatdict(' | |||||
715 | This exists for backward compatibility with the old-style template. Use |
|
712 | This exists for backward compatibility with the old-style template. Use | |
716 | hybriddict() for new template keywords. |
|
713 | hybriddict() for new template keywords. | |
717 | """ |
|
714 | """ | |
718 | c = [{key: k, value: v} for k, v in pycompat.iteritems(data)] |
|
715 | c = [{key: k, value: v} for k, v in data.items()] | |
719 | f = _showcompatlist(context, mapping, name, c, plural, separator) |
|
716 | f = _showcompatlist(context, mapping, name, c, plural, separator) | |
720 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) |
|
717 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) | |
721 |
|
718 |
@@ -382,7 +382,7 b' class transaction(util.transactional):' | |||||
382 | skip_pre = group == GEN_GROUP_POST_FINALIZE |
|
382 | skip_pre = group == GEN_GROUP_POST_FINALIZE | |
383 | skip_post = group == GEN_GROUP_PRE_FINALIZE |
|
383 | skip_post = group == GEN_GROUP_PRE_FINALIZE | |
384 |
|
384 | |||
385 | for id, entry in sorted(pycompat.iteritems(self._filegenerators)): |
|
385 | for id, entry in sorted(self._filegenerators.items()): | |
386 | any = True |
|
386 | any = True | |
387 | order, filenames, genfunc, location, post_finalize = entry |
|
387 | order, filenames, genfunc, location, post_finalize = entry | |
388 |
|
388 |
@@ -240,7 +240,7 b' def _generic_proxytunnel(self):' | |||||
240 | if x.lower().startswith('proxy-') |
|
240 | if x.lower().startswith('proxy-') | |
241 | } |
|
241 | } | |
242 | self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport) |
|
242 | self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport) | |
243 | for header in pycompat.iteritems(proxyheaders): |
|
243 | for header in proxyheaders.items(): | |
244 | self.send(b'%s: %s\r\n' % header) |
|
244 | self.send(b'%s: %s\r\n' % header) | |
245 | self.send(b'\r\n') |
|
245 | self.send(b'\r\n') | |
246 |
|
246 |
@@ -1301,7 +1301,7 b' class sortdict(collections.OrderedDict):' | |||||
1301 | # __setitem__() isn't called as of PyPy 5.8.0 |
|
1301 | # __setitem__() isn't called as of PyPy 5.8.0 | |
1302 | def update(self, src, **f): |
|
1302 | def update(self, src, **f): | |
1303 | if isinstance(src, dict): |
|
1303 | if isinstance(src, dict): | |
1304 | src = pycompat.iteritems(src) |
|
1304 | src = src.items() | |
1305 | for k, v in src: |
|
1305 | for k, v in src: | |
1306 | self[k] = v |
|
1306 | self[k] = v | |
1307 | for k in f: |
|
1307 | for k in f: |
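
The update() override above exists so that the subclass __setitem__ hook fires for every entry: normalizing a dict source to an item sequence and assigning key by key guarantees it even where OrderedDict.update would bypass __setitem__ (the PyPy case noted in the comment). A simplified model of the class:

    import collections

    class sortdict(collections.OrderedDict):
        def __setitem__(self, key, value):
            if key in self:
                del self[key]  # re-inserted keys move to the end
            super().__setitem__(key, value)

        def update(self, src, **f):
            if isinstance(src, dict):
                src = src.items()
            for k, v in src:
                self[k] = v
            for k in f:
                self[k] = f[k]

    d = sortdict([(b'a', 1), (b'b', 2)])
    d.update({b'a': 3})
    assert list(d) == [b'b', b'a']  # 'a' was re-inserted at the end
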
@@ -174,9 +174,7 b' def streamencodemap(d):' | |||||
174 | """ |
|
174 | """ | |
175 | yield encodelength(MAJOR_TYPE_MAP, len(d)) |
|
175 | yield encodelength(MAJOR_TYPE_MAP, len(d)) | |
176 |
|
176 | |||
177 | for key, value in sorted( |
|
177 | for key, value in sorted(d.items(), key=lambda x: _mixedtypesortkey(x[0])): | |
178 | pycompat.iteritems(d), key=lambda x: _mixedtypesortkey(x[0]) |
|
|||
179 | ): |
|
|||
180 | for chunk in streamencode(key): |
|
178 | for chunk in streamencode(key): | |
181 | yield chunk |
|
179 | yield chunk | |
182 | for chunk in streamencode(value): |
|
180 | for chunk in streamencode(value): |
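
streamencodemap sorts keys with a type-aware key function so that maps encode deterministically even when key types mix. A stand-in for the idea (not the real _mixedtypesortkey):

    def mixedtypesortkey(v):
        return (type(v).__name__, v)

    d = {b'b': 1, 3: 4, b'a': 2}
    keys = [k for k, _ in sorted(d.items(), key=lambda x: mixedtypesortkey(x[0]))]
    assert keys == [b'a', b'b', 3]  # grouped by type, ordered within each type
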
@@ -342,7 +342,7 b' def tempfilter(s, cmd):' | |||||
342 | def filter(s, cmd): |
|
342 | def filter(s, cmd): | |
343 | """filter a string through a command that transforms its input to its |
|
343 | """filter a string through a command that transforms its input to its | |
344 | output""" |
|
344 | output""" | |
345 | for name, fn in pycompat.iteritems(_filtertable): |
|
345 | for name, fn in _filtertable.items(): | |
346 | if cmd.startswith(name): |
|
346 | if cmd.startswith(name): | |
347 | return fn(s, cmd[len(name) :].lstrip()) |
|
347 | return fn(s, cmd[len(name) :].lstrip()) | |
348 | return pipefilter(s, cmd) |
|
348 | return pipefilter(s, cmd) | |
@@ -448,7 +448,7 b' def shellenviron(environ=None):' | |||||
448 |
|
448 | |||
449 | env = dict(encoding.environ) |
|
449 | env = dict(encoding.environ) | |
450 | if environ: |
|
450 | if environ: | |
451 | env.update((k, py2shell(v)) for k, v in pycompat.iteritems(environ)) |
|
451 | env.update((k, py2shell(v)) for k, v in environ.items()) | |
452 | env[b'HG'] = hgexecutable() |
|
452 | env[b'HG'] = hgexecutable() | |
453 | return env |
|
453 | return env | |
454 |
|
454 |
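
dict.update() accepts any iterable of pairs, so the generator expression above converts values on the fly without building an intermediate dict. A sketch (py2shell is simplified here to a None-to-empty mapping):

    def py2shell(val):
        return b'' if val is None else val

    env = {}
    environ = {b'HGUSER': b'alice', b'HGPLAIN': None}
    env.update((k, py2shell(v)) for k, v in environ.items())
    assert env == {b'HGUSER': b'alice', b'HGPLAIN': b''}
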
@@ -453,7 +453,7 b' def list_paths(ui, target_path=None):' | |||||
453 | """list all the (name, paths) in the passed ui""" |
|
453 | """list all the (name, paths) in the passed ui""" | |
454 | result = [] |
|
454 | result = [] | |
455 | if target_path is None: |
|
455 | if target_path is None: | |
456 | for name, paths in sorted(pycompat.iteritems(ui.paths)): |
|
456 | for name, paths in sorted(ui.paths.items()): | |
457 | for p in paths: |
|
457 | for p in paths: | |
458 | result.append((name, p)) |
|
458 | result.append((name, p)) | |
459 |
|
459 | |||
@@ -919,7 +919,7 b' class path(object):' | |||||
919 | # Now process the sub-options. If a sub-option is registered, its |
|
919 | # Now process the sub-options. If a sub-option is registered, its | |
920 | # attribute will always be present. The value will be None if there |
|
920 | # attribute will always be present. The value will be None if there | |
921 | # was no valid sub-option. |
|
921 | # was no valid sub-option. | |
922 | for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): |
|
922 | for suboption, (attr, func) in _pathsuboptions.items(): | |
923 | if suboption not in sub_options: |
|
923 | if suboption not in sub_options: | |
924 | setattr(self, attr, None) |
|
924 | setattr(self, attr, None) | |
925 | continue |
|
925 | continue | |
@@ -945,7 +945,7 b' class path(object):' | |||||
945 | This is intended to be used for presentation purposes. |
|
945 | This is intended to be used for presentation purposes. | |
946 | """ |
|
946 | """ | |
947 | d = {} |
|
947 | d = {} | |
948 | for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions): |
|
948 | for subopt, (attr, _func) in _pathsuboptions.items(): | |
949 | value = getattr(self, attr) |
|
949 | value = getattr(self, attr) | |
950 | if value is not None: |
|
950 | if value is not None: | |
951 | d[subopt] = value |
|
951 | d[subopt] = value |
@@ -405,11 +405,11 b' class verifier(object):' | |||||
405 | _(b'checking'), unit=_(b'manifests'), total=len(subdirs) |
|
405 | _(b'checking'), unit=_(b'manifests'), total=len(subdirs) | |
406 | ) |
|
406 | ) | |
407 |
|
407 | |||
408 | for subdir, linkrevs in pycompat.iteritems(subdirnodes): |
|
408 | for subdir, linkrevs in subdirnodes.items(): | |
409 | subdirfilenodes = self._verifymanifest( |
|
409 | subdirfilenodes = self._verifymanifest( | |
410 | linkrevs, subdir, storefiles, subdirprogress |
|
410 | linkrevs, subdir, storefiles, subdirprogress | |
411 | ) |
|
411 | ) | |
412 | for f, onefilenodes in pycompat.iteritems(subdirfilenodes): |
|
412 | for f, onefilenodes in subdirfilenodes.items(): | |
413 | filenodes.setdefault(f, {}).update(onefilenodes) |
|
413 | filenodes.setdefault(f, {}).update(onefilenodes) | |
414 |
|
414 | |||
415 | if not dir and subdirnodes: |
|
415 | if not dir and subdirnodes: |
@@ -122,7 +122,7 b" ARGUMENT_RECORD_HEADER = struct.Struct('" | |||||
122 |
|
122 | |||
123 | def humanflags(mapping, value): |
|
123 | def humanflags(mapping, value): | |
124 | """Convert a numeric flags value to a human value, using a mapping table.""" |
|
124 | """Convert a numeric flags value to a human value, using a mapping table.""" | |
125 | namemap = {v: k for k, v in pycompat.iteritems(mapping)} |
|
125 | namemap = {v: k for k, v in mapping.items()} | |
126 | flags = [] |
|
126 | flags = [] | |
127 | val = 1 |
|
127 | val = 1 | |
128 | while value >= val: |
|
128 | while value >= val: | |
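
humanflags above inverts the name-to-bit mapping once, then walks the bit positions of the value. A self-contained rendition with invented flag names:

    mapping = {b'continuation': 0x01, b'eos': 0x02}
    namemap = {v: k for k, v in mapping.items()}

    def humanflags(value):
        flags = []
        val = 1
        while value >= val:
            if value & val:
                flags.append(namemap.get(val, b'<unknown 0x%02x>' % val))
            val <<= 1
        return b'|'.join(flags)

    assert humanflags(0x03) == b'continuation|eos'
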
@@ -159,7 +159,7 b' class frame(object):' | |||||
159 | @encoding.strmethod |
|
159 | @encoding.strmethod | |
160 | def __repr__(self): |
|
160 | def __repr__(self): | |
161 | typename = b'<unknown 0x%02x>' % self.typeid |
|
161 | typename = b'<unknown 0x%02x>' % self.typeid | |
162 | for name, value in pycompat.iteritems(FRAME_TYPES): |
|
162 | for name, value in FRAME_TYPES.items(): | |
163 | if value == self.typeid: |
|
163 | if value == self.typeid: | |
164 | typename = name |
|
164 | typename = name | |
165 | break |
|
165 | break |
@@ -80,8 +80,7 b' def encodebatchcmds(req):' | |||||
80 | assert all(escapearg(k) == k for k in argsdict) |
|
80 | assert all(escapearg(k) == k for k in argsdict) | |
81 |
|
81 | |||
82 | args = b','.join( |
|
82 | args = b','.join( | |
83 | b'%s=%s' % (escapearg(k), escapearg(v)) |
|
83 | b'%s=%s' % (escapearg(k), escapearg(v)) for k, v in argsdict.items() | |
84 | for k, v in pycompat.iteritems(argsdict) |
|
|||
85 | ) |
|
84 | ) | |
86 | cmds.append(b'%s %s' % (op, args)) |
|
85 | cmds.append(b'%s %s' % (op, args)) | |
87 |
|
86 | |||
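
encodebatchcmds above joins each command's arguments as escaped key=value pairs. A toy escapearg (the real escape table lives in the wireproto code; these substitutions are illustrative, and the sort is only to keep the assertion stable):

    def escapearg(s):
        for ch, esc in ((b':', b':c'), (b',', b':o'), (b';', b':s'), (b'=', b':e')):
            s = s.replace(ch, esc)
        return s

    argsdict = {b'key': b'a=b', b'other': b'x'}
    args = b','.join(
        b'%s=%s' % (escapearg(k), escapearg(v))
        for k, v in sorted(argsdict.items())
    )
    assert args == b'key=a:eb,other=x'
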
@@ -438,7 +437,7 b' class wirepeer(repository.peer):' | |||||
438 | self.requirecap(b'getbundle', _(b'look up remote changes')) |
|
437 | self.requirecap(b'getbundle', _(b'look up remote changes')) | |
439 | opts = {} |
|
438 | opts = {} | |
440 | bundlecaps = kwargs.get(b'bundlecaps') or set() |
|
439 | bundlecaps = kwargs.get(b'bundlecaps') or set() | |
441 | for key, value in pycompat.iteritems(kwargs): |
|
440 | for key, value in kwargs.items(): | |
442 | if value is None: |
|
441 | if value is None: | |
443 | continue |
|
442 | continue | |
444 | keytype = wireprototypes.GETBUNDLE_ARGUMENTS.get(key) |
|
443 | keytype = wireprototypes.GETBUNDLE_ARGUMENTS.get(key) |
@@ -235,7 +235,7 b' def between(repo, proto, pairs):' | |||||
235 | def branchmap(repo, proto): |
|
235 | def branchmap(repo, proto): | |
236 | branchmap = repo.branchmap() |
|
236 | branchmap = repo.branchmap() | |
237 | heads = [] |
|
237 | heads = [] | |
238 | for branch, nodes in pycompat.iteritems(branchmap): |
|
238 | for branch, nodes in branchmap.items(): | |
239 | branchname = urlreq.quote(encoding.fromlocal(branch)) |
|
239 | branchname = urlreq.quote(encoding.fromlocal(branch)) | |
240 | branchnodes = wireprototypes.encodelist(nodes) |
|
240 | branchnodes = wireprototypes.encodelist(nodes) | |
241 | heads.append(b'%s %s' % (branchname, branchnodes)) |
|
241 | heads.append(b'%s %s' % (branchname, branchnodes)) | |
@@ -432,7 +432,7 b' def getbundle(repo, proto, others):' | |||||
432 | opts = options( |
|
432 | opts = options( | |
433 | b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others |
|
433 | b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others | |
434 | ) |
|
434 | ) | |
435 | for k, v in pycompat.iteritems(opts): |
|
435 | for k, v in opts.items(): | |
436 | keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k] |
|
436 | keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k] | |
437 | if keytype == b'nodes': |
|
437 | if keytype == b'nodes': | |
438 | opts[k] = wireprototypes.decodelist(v) |
|
438 | opts[k] = wireprototypes.decodelist(v) |
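
Each getbundle option decoded above is dispatched on its declared type; 'nodes' values, for instance, arrive as space-separated hex nodes. A rough stand-in for decodelist:

    from binascii import unhexlify

    def decodelist(data, sep=b' '):
        return [unhexlify(v) for v in data.split(sep)] if data else []

    heads = decodelist(b'11' * 20 + b' ' + b'22' * 20)
    assert len(heads) == 2 and all(len(n) == 20 for n in heads)
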