@@ -99,9 +99,7 b' class bugzilla_2_16(object):' | |||
|
99 | 99 | def filter_real_bug_ids(self, ids): |
|
100 | 100 | '''filter not-existing bug ids from list.''' |
|
101 | 101 | self.run('select bug_id from bugs where bug_id in %s' % buglist(ids)) |
|
102 | ids = [c[0] for c in self.cursor.fetchall()] | |
|
103 | ids.sort() | |
|
104 | return ids | |
|
102 | return util.sort([c[0] for c in self.cursor.fetchall()]) | |
|
105 | 103 | |
|
106 | 104 | def filter_unknown_bug_ids(self, node, ids): |
|
107 | 105 | '''filter bug ids from list that already refer to this changeset.''' |
@@ -114,9 +112,7 b' class bugzilla_2_16(object):' | |||
|
114 | 112 | self.ui.status(_('bug %d already knows about changeset %s\n') % |
|
115 | 113 | (id, short(node))) |
|
116 | 114 | unknown.pop(id, None) |
|
117 | ids = unknown.keys() | |
|
118 | ids.sort() | |
|
119 | return ids | |
|
115 | return util.sort(unknown.keys()) | |
|
120 | 116 | |
|
121 | 117 | def notify(self, ids): |
|
122 | 118 | '''tell bugzilla to send mail.''' |
@@ -92,14 +92,12 b' def churn(ui, repo, **opts):' | |||
|
92 | 92 | alias, actual = l.split() |
|
93 | 93 | amap[alias] = actual |
|
94 | 94 | |
|
95 | revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])] | |
|
96 | revs.sort() | |
|
95 | revs = util.sort([int(r) for r in cmdutil.revrange(repo, opts['rev'])]) | |
|
97 | 96 | stats = countrevs(ui, repo, amap, revs, opts.get('progress')) |
|
98 | 97 | if not stats: |
|
99 | 98 | return |
|
100 | 99 | |
|
101 | stats = [(-l, u, l) for u,l in stats.items()] | |
|
102 | stats.sort() | |
|
100 | stats = util.sort([(-l, u, l) for u,l in stats.items()]) | |
|
103 | 101 | maxchurn = float(max(1, stats[0][2])) |
|
104 | 102 | maxuser = max([len(u) for k, u, l in stats]) |
|
105 | 103 |
@@ -337,10 +337,7 b' class convert_cvs(converter_source):' | |||
|
337 | 337 | |
|
338 | 338 | def getchanges(self, rev): |
|
339 | 339 | self.modecache = {} |
|
340 | files = self.files[rev] | |
|
341 | cl = files.items() | |
|
342 | cl.sort() | |
|
343 | return (cl, {}) | |
|
340 | return util.sort(self.files[rev].items()), {} | |
|
344 | 341 | |
|
345 | 342 | def getcommit(self, rev): |
|
346 | 343 | return self.changeset[rev] |
@@ -349,7 +346,4 b' class convert_cvs(converter_source):' | |||
|
349 | 346 | return self.tags |
|
350 | 347 | |
|
351 | 348 | def getchangedfiles(self, rev, i): |
|
352 |
|
|
|
353 | files.sort() | |
|
354 | return files | |
|
355 | ||
|
349 | return util.sort(self.files[rev].keys()) |
@@ -297,8 +297,7 b' def createlog(ui, directory=None, root="' | |||
|
297 | 297 | if store: |
|
298 | 298 | # clean up the results and save in the log. |
|
299 | 299 | store = False |
|
300 | e.tags = [scache(x) for x in tags.get(e.revision, [])] | |
|
301 | e.tags.sort() | |
|
300 | e.tags = util.sort([scache(x) for x in tags.get(e.revision, [])]) | |
|
302 | 301 | e.comment = scache('\n'.join(e.comment)) |
|
303 | 302 | |
|
304 | 303 | revn = len(e.revision) |
@@ -468,9 +467,7 b' def createchangeset(ui, log, fuzz=60, me' | |||
|
468 | 467 | for tag in e.tags: |
|
469 | 468 | tags[tag] = True |
|
470 | 469 | # remember tags only if this is the latest changeset to have it |
|
471 | tagnames = [tag for tag in tags if globaltags[tag] is c] | |
|
472 | tagnames.sort() | |
|
473 | c.tags = tagnames | |
|
470 | c.tags = util.sort([tag for tag in tags if globaltags[tag] is c]) | |
|
474 | 471 | |
|
475 | 472 | # Find parent changesets, handle {{mergetobranch BRANCHNAME}} |
|
476 | 473 | # by inserting dummy changesets with two parents, and handle |
@@ -110,9 +110,8 b' class darcs_source(converter_source, com' | |||
|
110 | 110 | copies[elt.get('from')] = elt.get('to') |
|
111 | 111 | else: |
|
112 | 112 | changes.append((elt.text.strip(), rev)) |
|
113 | changes.sort() | |
|
114 | 113 | self.lastrev = rev |
|
115 | return changes, copies | |
|
114 | return util.sort(changes), copies | |
|
116 | 115 | |
|
117 | 116 | def getfile(self, name, rev): |
|
118 | 117 | if rev != self.lastrev: |
@@ -130,10 +130,8 b' class gnuarch_source(converter_source, c' | |||
|
130 | 130 | for c in cps: |
|
131 | 131 | copies[c] = cps[c] |
|
132 | 132 | |
|
133 | changes.sort() | |
|
134 | 133 | self.lastrev = rev |
|
135 | ||
|
136 | return changes, copies | |
|
134 | return util.sort(changes), copies | |
|
137 | 135 | |
|
138 | 136 | def getcommit(self, rev): |
|
139 | 137 | changes = self.changes[rev] |
@@ -164,14 +164,11 b' class mercurial_sink(converter_sink):' | |||
|
164 | 164 | tagparent = nullid |
|
165 | 165 | |
|
166 | 166 | try: |
|
167 | old = parentctx['.hgtags'].data() | |
|
168 | oldlines = old.splitlines(1) | |
|
169 | oldlines.sort() | |
|
167 | oldlines = util.sort(parentctx['.hgtags'].data().splitlines(1)) | |
|
170 | 168 | except: |
|
171 | 169 | oldlines = [] |
|
172 | 170 | |
|
173 | newlines = [("%s %s\n" % (tags[tag], tag)) for tag in tags] | |
|
174 | newlines.sort() | |
|
171 | newlines = util.sort([("%s %s\n" % (tags[tag], tag)) for tag in tags]) | |
|
175 | 172 | |
|
176 | 173 | if newlines == oldlines: |
|
177 | 174 | return None |
@@ -238,8 +235,7 b' class mercurial_source(converter_source)' | |||
|
238 | 235 | else: |
|
239 | 236 | m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3] |
|
240 | 237 | changes = [(name, rev) for name in m + a + r] |
|
241 | changes.sort() | |
|
242 | return (changes, self.getcopies(ctx, m + a)) | |
|
238 | return util.sort(changes), self.getcopies(ctx, m + a) | |
|
243 | 239 | |
|
244 | 240 | def getcopies(self, ctx, files): |
|
245 | 241 | copies = {} |
@@ -658,8 +658,7 b' class svn_source(converter_source):' | |||
|
658 | 658 | # This will fail if a directory was copied |
|
659 | 659 | # from another branch and then some of its files |
|
660 | 660 | # were deleted in the same transaction. |
|
661 | children = self._find_children(path, revnum) | |
|
662 | children.sort() | |
|
661 | children = util.sort(self._find_children(path, revnum)) | |
|
663 | 662 | for child in children: |
|
664 | 663 | # Can we move a child directory and its |
|
665 | 664 | # parent in the same commit? (probably can). Could |
@@ -732,8 +731,7 b' class svn_source(converter_source):' | |||
|
732 | 731 | parents = [] |
|
733 | 732 | # check whether this revision is the start of a branch or part |
|
734 | 733 | # of a branch renaming |
|
735 | orig_paths = orig_paths.items() | |
|
736 | orig_paths.sort() | |
|
734 | orig_paths = util.sort(orig_paths.items()) | |
|
737 | 735 | root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)] |
|
738 | 736 | if root_paths: |
|
739 | 737 | path, ent = root_paths[-1] |
@@ -1045,10 +1043,9 b' class svn_sink(converter_sink, commandli' | |||
|
1045 | 1043 | return dirs |
|
1046 | 1044 | |
|
1047 | 1045 | def add_dirs(self, files): |
|
1048 | add_dirs = [d for d in self.dirs_of(files) | |
|
1046 | add_dirs = [d for d in util.sort(self.dirs_of(files)) | |
|
1049 | 1047 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] |
|
1050 | 1048 | if add_dirs: |
|
1051 | add_dirs.sort() | |
|
1052 | 1049 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) |
|
1053 | 1050 | return add_dirs |
|
1054 | 1051 | |
@@ -1058,8 +1055,7 b' class svn_sink(converter_sink, commandli' | |||
|
1058 | 1055 | return files |
|
1059 | 1056 | |
|
1060 | 1057 | def tidy_dirs(self, names): |
|
1061 | dirs = list(self.dirs_of(names)) | |
|
1062 | dirs.sort() | |
|
1058 | dirs = util.sort(self.dirs_of(names)) | |
|
1063 | 1059 | dirs.reverse() |
|
1064 | 1060 | deleted = [] |
|
1065 | 1061 | for d in dirs: |
@@ -13,6 +13,7 b' from mercurial.commands import templateo' | |||
|
13 | 13 | from mercurial.i18n import _ |
|
14 | 14 | from mercurial.node import nullrev |
|
15 | 15 | from mercurial.util import Abort, canonpath |
|
16 | from mercurial import util | |
|
16 | 17 | |
|
17 | 18 | def revision_grapher(repo, start_rev, stop_rev): |
|
18 | 19 | """incremental revision grapher |
@@ -53,8 +54,7 b' def revision_grapher(repo, start_rev, st' | |||
|
53 | 54 | for parent in parents: |
|
54 | 55 | if parent not in next_revs: |
|
55 | 56 | parents_to_add.append(parent) |
|
56 | parents_to_add.sort() | |
|
57 | next_revs[rev_index:rev_index + 1] = parents_to_add | |
|
57 | next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add) | |
|
58 | 58 | |
|
59 | 59 | edges = [] |
|
60 | 60 | for parent in parents: |
@@ -105,8 +105,7 b' def filelog_grapher(repo, path, start_re' | |||
|
105 | 105 | for parent in parents: |
|
106 | 106 | if parent not in next_revs: |
|
107 | 107 | parents_to_add.append(parent) |
|
108 | parents_to_add.sort() | |
|
109 | next_revs[rev_index:rev_index + 1] = parents_to_add | |
|
108 | next_revs[rev_index:rev_index + 1] = util.sort(parents_to_add) | |
|
110 | 109 | |
|
111 | 110 | edges = [] |
|
112 | 111 | for parent in parents: |
@@ -534,9 +534,7 b' class Watcher(object):' | |||
|
534 | 534 | self.ui.note('%s processing %d deferred events as %d\n' % |
|
535 | 535 | (self.event_time(), self.deferred, |
|
536 | 536 | len(self.eventq))) |
|
537 |
|
|
|
538 | eventq.sort() | |
|
539 | for wpath, evts in eventq: | |
|
537 | for wpath, evts in util.sort(self.eventq.items()): | |
|
540 | 538 | for evt in evts: |
|
541 | 539 | self.deferred_event(wpath, evt) |
|
542 | 540 | self.eventq.clear() |
@@ -183,7 +183,6 b' class kwtemplater(object):' | |||
|
183 | 183 | candidates = [f for f in files if self.iskwfile(f, ctx.flags)] |
|
184 | 184 | if candidates: |
|
185 | 185 | self.restrict = True # do not expand when reading |
|
186 | candidates.sort() | |
|
187 | 186 | action = expand and 'expanding' or 'shrinking' |
|
188 | 187 | for f in candidates: |
|
189 | 188 | fp = self.repo.file(f) |
@@ -382,8 +381,7 b' def files(ui, repo, *pats, **opts):' | |||
|
382 | 381 | kwt = kwtools['templater'] |
|
383 | 382 | status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts) |
|
384 | 383 | modified, added, removed, deleted, unknown, ignored, clean = status |
|
385 | files = modified + added + clean + unknown | |
|
386 | files.sort() | |
|
384 | files = util.sort(modified + added + clean + unknown) | |
|
387 | 385 | wctx = repo[None] |
|
388 | 386 | kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)] |
|
389 | 387 | cwd = pats and repo.getcwd() or '' |
@@ -143,8 +143,7 b' class queue:' | |||
|
143 | 143 | bad = self.check_guard(guard) |
|
144 | 144 | if bad: |
|
145 | 145 | raise util.Abort(bad) |
|
146 |
guards = |
|
|
147 | guards.sort() | |
|
146 | guards = util.sort(util.unique(guards)) | |
|
148 | 147 | self.ui.debug('active guards: %s\n' % ' '.join(guards)) |
|
149 | 148 | self.active_guards = guards |
|
150 | 149 | self.guards_dirty = True |
@@ -536,8 +535,7 b' class queue:' | |||
|
536 | 535 | return (err, n) |
|
537 | 536 | |
|
538 | 537 | def _clean_series(self, patches): |
|
539 | indices = [self.find_series(p) for p in patches] | |
|
540 | indices.sort() | |
|
538 | indices = util.sort([self.find_series(p) for p in patches]) | |
|
541 | 539 | for i in indices[-1::-1]: |
|
542 | 540 | del self.full_series[i] |
|
543 | 541 | self.parse_series() |
@@ -545,10 +543,10 b' class queue:' | |||
|
545 | 543 | |
|
546 | 544 | def finish(self, repo, revs): |
|
547 | 545 | revs.sort() |
|
548 | firstrev = repo.changelog.rev(revlog.bin(self.applied[0].rev)) | |
|
546 | firstrev = repo[self.applied[0].rev].rev() | |
|
549 | 547 | appliedbase = 0 |
|
550 | 548 | patches = [] |
|
551 | for rev in revs: | |
|
549 | for rev in util.sort(revs): | |
|
552 | 550 | if rev < firstrev: |
|
553 | 551 | raise util.Abort(_('revision %d is not managed') % rev) |
|
554 | 552 | base = revlog.bin(self.applied[appliedbase].rev) |
@@ -1261,8 +1259,7 b' class queue:' | |||
|
1261 | 1259 | self.guards_path) |
|
1262 | 1260 | and not fl.startswith('.')): |
|
1263 | 1261 | msng_list.append(fl) |
|
1264 | msng_list.sort() | |
|
1265 | for x in msng_list: | |
|
1262 | for x in util.sort(msng_list): | |
|
1266 | 1263 | pfx = self.ui.verbose and ('D ') or '' |
|
1267 | 1264 | self.ui.write("%s%s\n" % (pfx, displayname(x))) |
|
1268 | 1265 |
@@ -156,9 +156,7 b' class notifier(object):' | |||
|
156 | 156 | if fnmatch.fnmatch(self.repo.root, pat): |
|
157 | 157 | for user in users.split(','): |
|
158 | 158 | subs[self.fixmail(user)] = 1 |
|
159 | subs = subs.keys() | |
|
160 | subs.sort() | |
|
161 | return subs | |
|
159 | return util.sort(subs) | |
|
162 | 160 | |
|
163 | 161 | def url(self, path=None): |
|
164 | 162 | return self.ui.config('web', 'baseurl') + (path or self.root) |
@@ -77,15 +77,12 b' def purge(ui, repo, *dirs, **opts):' | |||
|
77 | 77 | match = cmdutil.match(repo, dirs, opts) |
|
78 | 78 | match.dir = directories.append |
|
79 | 79 | status = repo.status(match=match, ignored=opts['all'], unknown=True) |
|
80 | files = status[4] + status[5] | |
|
81 | files.sort() | |
|
82 | directories.sort() | |
|
83 | 80 | |
|
84 | for f in files: | |
|
81 | for f in util.sort(status[4] + status[5]): | |
|
85 | 82 | ui.note(_('Removing file %s\n') % f) |
|
86 | 83 | remove(os.remove, f) |
|
87 | 84 | |
|
88 | for f in directories[::-1]: | |
|
85 | for f in util.sort(directories)[::-1]: | |
|
89 | 86 | if match(f) and not os.listdir(repo.wjoin(f)): |
|
90 | 87 | ui.note(_('Removing directory %s\n') % f) |
|
91 | 88 | remove(os.rmdir, f) |
@@ -88,9 +88,7 b' class transplanter:' | |||
|
88 | 88 | |
|
89 | 89 | def apply(self, repo, source, revmap, merges, opts={}): |
|
90 | 90 | '''apply the revisions in revmap one by one in revision order''' |
|
91 | revs = revmap.keys() | |
|
92 | revs.sort() | |
|
93 | ||
|
91 | revs = util.sort(revmap) | |
|
94 | 92 | p1, p2 = repo.dirstate.parents() |
|
95 | 93 | pulls = [] |
|
96 | 94 | diffopts = patch.diffopts(self.ui, opts) |
@@ -310,9 +308,7 b' class transplanter:' | |||
|
310 | 308 | if not os.path.isdir(self.path): |
|
311 | 309 | os.mkdir(self.path) |
|
312 | 310 | series = self.opener('series', 'w') |
|
313 | revs = revmap.keys() | |
|
314 | revs.sort() | |
|
315 | for rev in revs: | |
|
311 | for rev in util.sort(revmap): | |
|
316 | 312 | series.write(revlog.hex(revmap[rev]) + '\n') |
|
317 | 313 | if merges: |
|
318 | 314 | series.write('# Merges\n') |
@@ -572,10 +568,6 b' def transplant(ui, repo, *revs, **opts):' | |||
|
572 | 568 | for r in merges: |
|
573 | 569 | revmap[source.changelog.rev(r)] = r |
|
574 | 570 | |
|
575 | revs = revmap.keys() | |
|
576 | revs.sort() | |
|
577 | pulls = [] | |
|
578 | ||
|
579 | 571 | tp.apply(repo, source, revmap, merges, opts) |
|
580 | 572 | finally: |
|
581 | 573 | if bundle: |
@@ -130,9 +130,7 b' class changelog(revlog):' | |||
|
130 | 130 | |
|
131 | 131 | def encode_extra(self, d): |
|
132 | 132 | # keys must be sorted to produce a deterministic changelog entry |
|
133 | keys = d.keys() | |
|
134 | keys.sort() | |
|
135 | items = [_string_escape('%s:%s' % (k, d[k])) for k in keys] | |
|
133 | items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)] | |
|
136 | 134 | return "\0".join(items) |
|
137 | 135 | |
|
138 | 136 | def read(self, node): |
@@ -175,7 +173,7 b' class changelog(revlog):' | |||
|
175 | 173 | files = l[3:] |
|
176 | 174 | return (manifest, user, (time, timezone), files, desc, extra) |
|
177 | 175 | |
|
178 | def add(self, manifest, list, desc, transaction, p1=None, p2=None, | |
|
176 | def add(self, manifest, files, desc, transaction, p1=None, p2=None, | |
|
179 | 177 | user=None, date=None, extra={}): |
|
180 | 178 | |
|
181 | 179 | user, desc = util.fromlocal(user), util.fromlocal(desc) |
@@ -189,7 +187,6 b' class changelog(revlog):' | |||
|
189 | 187 | if extra: |
|
190 | 188 | extra = self.encode_extra(extra) |
|
191 | 189 | parseddate = "%s %s" % (parseddate, extra) |
|
192 | list.sort() | |
|
193 | l = [hex(manifest), user, parseddate] + list + ["", desc] | |
|
190 | l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc] | |
|
194 | 191 | text = "\n".join(l) |
|
195 | 192 | return self.addrevision(text, transaction, len(self), p1, p2) |
@@ -653,9 +653,7 b' class changeset_printer(object):' | |||
|
653 | 653 | self.ui.write(_("copies: %s\n") % ' '.join(copies)) |
|
654 | 654 | |
|
655 | 655 | if extra and self.ui.debugflag: |
|
656 | extraitems = extra.items() | |
|
657 | extraitems.sort() | |
|
658 | for key, value in extraitems: | |
|
656 | for key, value in util.sort(extra.items()): | |
|
659 | 657 | self.ui.write(_("extra: %s=%s\n") |
|
660 | 658 | % (key, value.encode('string_escape'))) |
|
661 | 659 | |
@@ -799,9 +797,7 b' class changeset_templater(changeset_prin' | |||
|
799 | 797 | return showlist('tag', self.repo.nodetags(changenode), **args) |
|
800 | 798 | |
|
801 | 799 | def showextras(**args): |
|
802 | extras = changes[5].items() | |
|
803 | extras.sort() | |
|
804 | for key, value in extras: | |
|
800 | for key, value in util.sort(changes[5].items()): | |
|
805 | 801 | args = args.copy() |
|
806 | 802 | args.update(dict(key=key, value=value)) |
|
807 | 803 | yield self.t('extra', **args) |
@@ -1129,9 +1125,7 b' def walkchangerevs(ui, repo, pats, chang' | |||
|
1129 | 1125 | for i, window in increasing_windows(0, len(revs)): |
|
1130 | 1126 | yield 'window', revs[0] < revs[-1], revs[-1] |
|
1131 | 1127 | nrevs = [rev for rev in revs[i:i+window] if want(rev)] |
|
1132 | srevs = list(nrevs) | |
|
1133 | srevs.sort() | |
|
1134 | for rev in srevs: | |
|
1128 | for rev in util.sort(list(nrevs)): | |
|
1135 | 1129 | fns = fncache.get(rev) |
|
1136 | 1130 | if not fns: |
|
1137 | 1131 | def fns_generator(): |
@@ -1159,7 +1153,7 b' def commit(ui, repo, commitfunc, pats, o' | |||
|
1159 | 1153 | m = match(repo, pats, opts) |
|
1160 | 1154 | if pats: |
|
1161 | 1155 | modified, added, removed = repo.status(match=m)[:3] |
|
1162 | files = modified + added + removed | |
|
1156 | files = util.sort(modified + added + removed) | |
|
1163 | 1157 | slist = None |
|
1164 | 1158 | for f in m.files(): |
|
1165 | 1159 | if f == '.': |
@@ -1173,11 +1167,8 b' def commit(ui, repo, commitfunc, pats, o' | |||
|
1173 | 1167 | raise util.Abort(_("file %s not found!") % rel) |
|
1174 | 1168 | if stat.S_ISDIR(mode): |
|
1175 | 1169 | name = f + '/' |
|
1176 | if slist is None: | |
|
1177 | slist = list(files) | |
|
1178 | slist.sort() | |
|
1179 | i = bisect.bisect(slist, name) | |
|
1180 | if i >= len(slist) or not slist[i].startswith(name): | |
|
1170 | i = bisect.bisect(files, name) | |
|
1171 | if i >= len(files) or not files[i].startswith(name): | |
|
1181 | 1172 | raise util.Abort(_("no match under directory %s!") |
|
1182 | 1173 | % rel) |
|
1183 | 1174 | elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)): |
@@ -380,9 +380,8 b' def branches(ui, repo, active=False):' | |||
|
380 | 380 | hexfunc = ui.debugflag and hex or short |
|
381 | 381 | activebranches = [util.tolocal(repo[n].branch()) |
|
382 | 382 | for n in repo.heads()] |
|
383 | branches = [(tag in activebranches, repo.changelog.rev(node), tag) | |
|
384 | for tag, node in repo.branchtags().items()] | |
|
385 | branches.sort() | |
|
383 | branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag) | |
|
384 | for tag, node in repo.branchtags().items()]) | |
|
386 | 385 | branches.reverse() |
|
387 | 386 | |
|
388 | 387 | for isactive, node, tag in branches: |
@@ -635,9 +634,7 b" def debugcomplete(ui, cmd='', **opts):" | |||
|
635 | 634 | ui.write("%s\n" % "\n".join(options)) |
|
636 | 635 | return |
|
637 | 636 | |
|
638 | clist = cmdutil.findpossible(ui, cmd, table).keys() | |
|
639 | clist.sort() | |
|
640 | ui.write("%s\n" % "\n".join(clist)) | |
|
637 | ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table)))) | |
|
641 | 638 | |
|
642 | 639 | def debugfsinfo(ui, path = "."): |
|
643 | 640 | file('.debugfsinfo', 'w').write('') |
@@ -727,11 +724,9 b' def debugsetparents(ui, repo, rev1, rev2' | |||
|
727 | 724 | |
|
728 | 725 | def debugstate(ui, repo, nodates=None): |
|
729 | 726 | """show the contents of the current dirstate""" |
|
730 | k = repo.dirstate._map.items() | |
|
731 | k.sort() | |
|
732 | 727 | timestr = "" |
|
733 | 728 | showdate = not nodates |
|
734 | for file_, ent in k: | |
|
729 | for file_, ent in util.sort(repo.dirstate._map.items()): | |
|
735 | 730 | if showdate: |
|
736 | 731 | if ent[3] == -1: |
|
737 | 732 | # Pad or slice to locale representation |
@@ -1142,9 +1137,7 b' def grep(ui, repo, pattern, *pats, **opt' | |||
|
1142 | 1137 | except revlog.LookupError: |
|
1143 | 1138 | pass |
|
1144 | 1139 | elif st == 'iter': |
|
1145 | states = matches[rev].items() | |
|
1146 | states.sort() | |
|
1147 | for fn, m in states: | |
|
1140 | for fn, m in util.sort(matches[rev].items()): | |
|
1148 | 1141 | copy = copies.get(rev, {}).get(fn) |
|
1149 | 1142 | if fn in skip: |
|
1150 | 1143 | if copy: |
@@ -1162,9 +1155,7 b' def grep(ui, repo, pattern, *pats, **opt' | |||
|
1162 | 1155 | fstate[copy] = m |
|
1163 | 1156 | prev[fn] = rev |
|
1164 | 1157 | |
|
1165 | fstate = fstate.items() | |
|
1166 | fstate.sort() | |
|
1167 | for fn, state in fstate: | |
|
1158 | for fn, state in util.sort(fstate.items()): | |
|
1168 | 1159 | if fn in skip: |
|
1169 | 1160 | continue |
|
1170 | 1161 | if fn not in copies.get(prev[fn], {}): |
@@ -1304,8 +1295,7 b' def help_(ui, name=None, with_version=Fa' | |||
|
1304 | 1295 | return |
|
1305 | 1296 | |
|
1306 | 1297 | ui.status(header) |
|
1307 | fns = h.keys() | |
|
1308 | fns.sort() | |
|
1298 | fns = util.sort(h) | |
|
1309 | 1299 | m = max(map(len, fns)) |
|
1310 | 1300 | for f in fns: |
|
1311 | 1301 | if ui.verbose: |
@@ -2215,9 +2205,7 b' def remove(ui, repo, *pats, **opts):' | |||
|
2215 | 2205 | warn(modified, _('is modified')) |
|
2216 | 2206 | warn(added, _('has been marked for add')) |
|
2217 | 2207 | |
|
2218 | files = remove + forget | |
|
2219 | files.sort() | |
|
2220 | for f in files: | |
|
2208 | for f in util.sort(remove + forget): | |
|
2221 | 2209 | if ui.verbose or not m.exact(f): |
|
2222 | 2210 | ui.status(_('removing %s\n') % m.rel(f)) |
|
2223 | 2211 | |
@@ -2401,10 +2389,7 b' def revert(ui, repo, *pats, **opts):' | |||
|
2401 | 2389 | (deleted, revert, remove, False, False), |
|
2402 | 2390 | ) |
|
2403 | 2391 | |
|
2404 | entries = names.items() | |
|
2405 | entries.sort() | |
|
2406 | ||
|
2407 | for abs, (rel, exact) in entries: | |
|
2392 | for abs, (rel, exact) in util.sort(names.items()): | |
|
2408 | 2393 | mfentry = mf.get(abs) |
|
2409 | 2394 | target = repo.wjoin(abs) |
|
2410 | 2395 | def handle(xlist, dobackup): |
@@ -71,9 +71,7 b' class changectx(object):' | |||
|
71 | 71 | return self.filectx(key) |
|
72 | 72 | |
|
73 | 73 | def __iter__(self): |
|
74 |
|
|
|
75 | a.sort() | |
|
76 | for f in a: | |
|
74 | for f in util.sort(self._manifest): | |
|
77 | 75 | yield f |
|
78 | 76 | |
|
79 | 77 | def changeset(self): return self._changeset |
@@ -134,10 +132,7 b' class changectx(object):' | |||
|
134 | 132 | def filectxs(self): |
|
135 | 133 | """generate a file context for each file in this changeset's |
|
136 | 134 | manifest""" |
|
137 | mf = self.manifest() | |
|
138 | m = mf.keys() | |
|
139 | m.sort() | |
|
140 | for f in m: | |
|
135 | for f in util.sort(mf): | |
|
141 | 136 | yield self.filectx(f, fileid=mf[f]) |
|
142 | 137 | |
|
143 | 138 | def ancestor(self, c2): |
@@ -383,12 +378,11 b' class filectx(object):' | |||
|
383 | 378 | # sort by revision (per file) which is a topological order |
|
384 | 379 | visit = [] |
|
385 | 380 | for f in files: |
|
386 | fn = [(n.rev(), n) for n in needed.keys() if n._path == f] | |
|
381 | fn = [(n.rev(), n) for n in needed if n._path == f] | |
|
387 | 382 | visit.extend(fn) |
|
388 | visit.sort() | |
|
383 | ||
|
389 | 384 | hist = {} |
|
390 | ||
|
391 | for r, f in visit: | |
|
385 | for r, f in util.sort(visit): | |
|
392 | 386 | curr = decorate(f.data(), f) |
|
393 | 387 | for p in parents(f): |
|
394 | 388 | if p != nullid: |
@@ -530,9 +524,7 b' class workingctx(changectx):' | |||
|
530 | 524 | def date(self): return self._date |
|
531 | 525 | def description(self): return self._text |
|
532 | 526 | def files(self): |
|
533 | f = self.modified() + self.added() + self.removed() | |
|
534 | f.sort() | |
|
535 | return f | |
|
527 | return util.sort(self._status[0] + self._status[1] + self._status[2]) | |
|
536 | 528 | |
|
537 | 529 | def modified(self): return self._status[0] |
|
538 | 530 | def added(self): return self._status[1] |
@@ -688,8 +680,7 b' class memctx(object):' | |||
|
688 | 680 | parents = [(p or nullid) for p in parents] |
|
689 | 681 | p1, p2 = parents |
|
690 | 682 | self._parents = [changectx(self._repo, p) for p in (p1, p2)] |
|
691 | files = list(files) | |
|
692 | files.sort() | |
|
683 | files = util.sort(list(files)) | |
|
693 | 684 | self._status = [files, [], [], [], []] |
|
694 | 685 | self._filectxfn = filectxfn |
|
695 | 686 |
@@ -11,9 +11,7 b' import util, heapq' | |||
|
11 | 11 | |
|
12 | 12 | def _nonoverlap(d1, d2, d3): |
|
13 | 13 | "Return list of elements in d1 not in d2 or d3" |
|
14 | l = [d for d in d1 if d not in d3 and d not in d2] | |
|
15 | l.sort() | |
|
16 | return l | |
|
14 | return util.sort([d for d in d1 if d not in d3 and d not in d2]) | |
|
17 | 15 | |
|
18 | 16 | def _dirname(f): |
|
19 | 17 | s = f.rfind("/") |
@@ -49,9 +47,7 b' def _findoldnames(fctx, limit):' | |||
|
49 | 47 | visit += [(p, depth - 1) for p in fc.parents()] |
|
50 | 48 | |
|
51 | 49 | # return old names sorted by depth |
|
52 | old = old.values() | |
|
53 | old.sort() | |
|
54 | return [o[1] for o in old] | |
|
50 | return [o[1] for o in util.sort(old.values())] | |
|
55 | 51 | |
|
56 | 52 | def _findlimit(repo, a, b): |
|
57 | 53 | "find the earliest revision that's an ancestor of a or b but not both" |
@@ -153,9 +153,7 b' class dirstate(object):' | |||
|
153 | 153 | return key in self._map |
|
154 | 154 | |
|
155 | 155 | def __iter__(self): |
|
156 | a = self._map.keys() | |
|
157 | a.sort() | |
|
158 | for x in a: | |
|
156 | for x in util.sort(self._map): | |
|
159 | 157 | yield x |
|
160 | 158 | |
|
161 | 159 | def parents(self): |
@@ -436,8 +434,7 b' class dirstate(object):' | |||
|
436 | 434 | if not unknown: |
|
437 | 435 | return ret |
|
438 | 436 | |
|
439 | b = self._map.keys() | |
|
440 | b.sort() | |
|
437 | b = util.sort(self._map) | |
|
441 | 438 | blen = len(b) |
|
442 | 439 | |
|
443 | 440 | for x in unknown: |
@@ -578,12 +575,10 b' class dirstate(object):' | |||
|
578 | 575 | add((nn, 'f', st)) |
|
579 | 576 | elif np in dc: |
|
580 | 577 | add((nn, 'm', st)) |
|
581 | found.sort() | |
|
582 | return found | |
|
578 | return util.sort(found) | |
|
583 | 579 | |
|
584 | 580 | # step one, find all files that match our criteria |
|
585 | files.sort() | |
|
586 | for ff in files: | |
|
581 | for ff in util.sort(files): | |
|
587 | 582 | nf = normpath(ff) |
|
588 | 583 | nn = self.normalize(nf) |
|
589 | 584 | f = _join(ff) |
@@ -617,9 +612,7 b' class dirstate(object):' | |||
|
617 | 612 | |
|
618 | 613 | # step two run through anything left in the dc hash and yield |
|
619 | 614 | # if we haven't already seen it |
|
620 | ks = dc.keys() | |
|
621 | ks.sort() | |
|
622 | for k in ks: | |
|
615 | for k in util.sort(dc): | |
|
623 | 616 | if k in known: |
|
624 | 617 | continue |
|
625 | 618 | known[k] = 1 |
@@ -63,8 +63,7 b' def _picktool(repo, ui, path, binary, sy' | |||
|
63 | 63 | if t not in tools: |
|
64 | 64 | tools[t] = int(_toolstr(ui, t, "priority", "0")) |
|
65 | 65 | names = tools.keys() |
|
66 | tools = [(-p,t) for t,p in tools.items()] | |
|
67 | tools.sort() | |
|
66 | tools = util.sort([(-p,t) for t,p in tools.items()]) | |
|
68 | 67 | uimerge = ui.config("ui", "merge") |
|
69 | 68 | if uimerge: |
|
70 | 69 | if uimerge not in names: |
@@ -60,7 +60,6 b' def bisect(changelog, state):' | |||
|
60 | 60 | children[prev] = [rev] |
|
61 | 61 | visit.append(prev) |
|
62 | 62 | |
|
63 | candidates.sort() | |
|
64 | 63 | # have we narrowed it down to one entry? |
|
65 | 64 | tot = len(candidates) |
|
66 | 65 | if tot == 1: |
@@ -71,7 +70,7 b' def bisect(changelog, state):' | |||
|
71 | 70 | best_rev = None |
|
72 | 71 | best_len = -1 |
|
73 | 72 | poison = {} |
|
74 | for rev in candidates: | |
|
73 | for rev in util.sort(candidates): | |
|
75 | 74 | if rev in poison: |
|
76 | 75 | for c in children.get(rev, []): |
|
77 | 76 | poison[c] = True # poison children |
@@ -19,8 +19,8 b' from request import wsgirequest' | |||
|
19 | 19 | class hgwebdir(object): |
|
20 | 20 | def __init__(self, config, parentui=None): |
|
21 | 21 | def cleannames(items): |
|
22 | return [(util.pconvert(name).strip('/'), path) | |
|
23 | for name, path in items] | |
|
22 | return util.sort([(util.pconvert(name).strip('/'), path) | |
|
23 | for name, path in items]) | |
|
24 | 24 | |
|
25 | 25 | self.parentui = parentui or ui.ui(report_untrusted=False, |
|
26 | 26 | interactive = False) |
@@ -34,7 +34,6 b' class hgwebdir(object):' | |||
|
34 | 34 | self.repos_sorted = ('', False) |
|
35 | 35 | elif isinstance(config, dict): |
|
36 | 36 | self.repos = cleannames(config.items()) |
|
37 | self.repos.sort() | |
|
38 | 37 | else: |
|
39 | 38 | if isinstance(config, util.configparser): |
|
40 | 39 | cp = config |
@@ -13,7 +13,7 b' from mercurial.util import binary, dates' | |||
|
13 | 13 | from mercurial.repo import RepoError |
|
14 | 14 | from common import paritygen, staticfile, get_contact, ErrorResponse |
|
15 | 15 | from common import HTTP_OK, HTTP_NOT_FOUND |
|
16 | from mercurial import graphmod | |
|
16 | from mercurial import graphmod, util | |
|
17 | 17 | |
|
18 | 18 | # __all__ is populated with the allowed commands. Be sure to add to it if |
|
19 | 19 | # you're adding a new command, or the new command won't work. |
@@ -288,9 +288,7 b' def manifest(web, req, tmpl):' | |||
|
288 | 288 | raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) |
|
289 | 289 | |
|
290 | 290 | def filelist(**map): |
|
291 | fl = files.keys() | |
|
292 | fl.sort() | |
|
293 | for f in fl: | |
|
291 | for f in util.sort(files): | |
|
294 | 292 | full, fnode = files[f] |
|
295 | 293 | if not fnode: |
|
296 | 294 | continue |
@@ -304,9 +302,7 b' def manifest(web, req, tmpl):' | |||
|
304 | 302 | "permissions": mf.flags(full)} |
|
305 | 303 | |
|
306 | 304 | def dirlist(**map): |
|
307 | fl = files.keys() | |
|
308 | fl.sort() | |
|
309 | for f in fl: | |
|
305 | for f in util.sort(files): | |
|
310 | 306 | full, fnode = files[f] |
|
311 | 307 | if fnode: |
|
312 | 308 | continue |
@@ -378,9 +374,7 b' def summary(web, req, tmpl):' | |||
|
378 | 374 | |
|
379 | 375 | b = web.repo.branchtags() |
|
380 | 376 | l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()] |
|
381 | l.sort() | |
|
382 | ||
|
383 | for r,n,t in l: | |
|
377 | for r,n,t in util.sort(l): | |
|
384 | 378 | yield {'parity': parity.next(), |
|
385 | 379 | 'branch': t, |
|
386 | 380 | 'node': hex(n), |
@@ -96,10 +96,9 b' def hook(ui, repo, name, throw=False, **' | |||
|
96 | 96 | oldstdout = os.dup(sys.__stdout__.fileno()) |
|
97 | 97 | os.dup2(sys.__stderr__.fileno(), sys.__stdout__.fileno()) |
|
98 | 98 | |
|
99 | hooks = [(hname, cmd) for hname, cmd in ui.configitems('hooks') | |
|
100 | if hname.split('.')[0] == name and cmd] | |
|
101 | hooks.sort() | |
|
102 | for hname, cmd in hooks: | |
|
99 | for hname, cmd in util.sort(ui.configitems('hooks')): | |
|
100 | if hname.split('.')[0] != name or not cmd: | |
|
101 | continue | |
|
103 | 102 | if callable(cmd): |
|
104 | 103 | r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r |
|
105 | 104 | elif cmd.startswith('python:'): |
@@ -366,8 +366,7 b' class localrepository(repo.repository):' | |||
|
366 | 366 | except: |
|
367 | 367 | r = -2 # sort to the beginning of the list if unknown |
|
368 | 368 | l.append((r, t, n)) |
|
369 | l.sort() | |
|
370 | return [(t, n) for r, t, n in l] | |
|
369 | return [(t, n) for r, t, n in util.sort(l)] | |
|
371 | 370 | |
|
372 | 371 | def nodetags(self, node): |
|
373 | 372 | '''return the tags associated with a node''' |
@@ -811,7 +810,7 b' class localrepository(repo.repository):' | |||
|
811 | 810 | tr = None |
|
812 | 811 | valid = 0 # don't save the dirstate if this isn't set |
|
813 | 812 | try: |
|
814 | commit = wctx.modified() + wctx.added() | |
|
813 | commit = util.sort(wctx.modified() + wctx.added()) | |
|
815 | 814 | remove = wctx.removed() |
|
816 | 815 | extra = wctx.extra().copy() |
|
817 | 816 | branchname = extra['branch'] |
@@ -844,7 +843,6 b' class localrepository(repo.repository):' | |||
|
844 | 843 | new = {} |
|
845 | 844 | changed = [] |
|
846 | 845 | linkrev = len(self) |
|
847 | commit.sort() | |
|
848 | 846 | for f in commit: |
|
849 | 847 | self.ui.note(f + "\n") |
|
850 | 848 | try: |
@@ -871,10 +869,9 b' class localrepository(repo.repository):' | |||
|
871 | 869 | |
|
872 | 870 | # update manifest |
|
873 | 871 | m1.update(new) |
|
874 | remove.sort() | |
|
875 | 872 | removed = [] |
|
876 | 873 | |
|
877 | for f in remove: | |
|
874 | for f in util.sort(remove): | |
|
878 | 875 | if f in m1: |
|
879 | 876 | del m1[f] |
|
880 | 877 | removed.append(f) |
@@ -950,10 +947,7 b' class localrepository(repo.repository):' | |||
|
950 | 947 | # for dirstate.walk, files=['.'] means "walk the whole tree". |
|
951 | 948 | # follow that here, too |
|
952 | 949 | fdict.pop('.', None) |
|
953 | mdict = self.manifest.read(self.changelog.read(node)[0]) | |
|
954 | mfiles = mdict.keys() | |
|
955 | mfiles.sort() | |
|
956 | for fn in mfiles: | |
|
950 | for fn in self[node]: | |
|
957 | 951 | for ffn in fdict: |
|
958 | 952 | # match if the file is the exact name or a directory |
|
959 | 953 | if ffn == fn or fn.startswith("%s/" % ffn): |
@@ -961,9 +955,7 b' class localrepository(repo.repository):' | |||
|
961 | 955 | break |
|
962 | 956 | if match(fn): |
|
963 | 957 | yield fn |
|
964 | ffiles = fdict.keys() | |
|
965 | ffiles.sort() | |
|
966 | for fn in ffiles: | |
|
958 | for fn in util.sort(fdict): | |
|
967 | 959 | if match.bad(fn, 'No such file in rev ' + short(node)) \ |
|
968 | 960 | and match(fn): |
|
969 | 961 | yield fn |
@@ -1065,10 +1057,8 b' class localrepository(repo.repository):' | |||
|
1065 | 1057 | |
|
1066 | 1058 | # make sure to sort the files so we talk to the disk in a |
|
1067 | 1059 | # reasonable order |
|
1068 | mf2keys = mf2.keys() | |
|
1069 | mf2keys.sort() | |
|
1070 | 1060 | getnode = lambda fn: mf1.get(fn, nullid) |
|
1071 | for fn in mf2keys: | |
|
1061 | for fn in util.sort(mf2): | |
|
1072 | 1062 | if fn in mf1: |
|
1073 | 1063 | if (mf1.flags(fn) != mf2.flags(fn) or |
|
1074 | 1064 | (mf1[fn] != mf2[fn] and |
@@ -1190,8 +1180,7 b' class localrepository(repo.repository):' | |||
|
1190 | 1180 | heads = self.changelog.heads(start) |
|
1191 | 1181 | # sort the output in rev descending order |
|
1192 | 1182 | heads = [(-self.changelog.rev(h), h) for h in heads] |
|
1193 | heads.sort() | |
|
1194 | return [n for (r, n) in heads] | |
|
1183 | return [n for (r, n) in util.sort(heads)] | |
|
1195 | 1184 | |
|
1196 | 1185 | def branchheads(self, branch=None, start=None): |
|
1197 | 1186 | if branch is None: |
@@ -1843,10 +1832,8 b' class localrepository(repo.repository):' | |||
|
1843 | 1832 | add_extra_nodes(fname, |
|
1844 | 1833 | msng_filenode_set.setdefault(fname, {})) |
|
1845 | 1834 | changedfiles[fname] = 1 |
|
1846 | changedfiles = changedfiles.keys() | |
|
1847 | changedfiles.sort() | |
|
1848 | 1835 | # Go through all our files in order sorted by name. |
|
1849 | for fname in changedfiles: | |
|
1836 | for fname in util.sort(changedfiles): | |
|
1850 | 1837 | filerevlog = self.file(fname) |
|
1851 | 1838 | if not len(filerevlog): |
|
1852 | 1839 | raise util.Abort(_("empty or missing revlog for %s") % fname) |
@@ -1924,15 +1911,13 b' class localrepository(repo.repository):' | |||
|
1924 | 1911 | for chnk in cl.group(nodes, identity, |
|
1925 | 1912 | changed_file_collector(changedfiles)): |
|
1926 | 1913 | yield chnk |
|
1927 | changedfiles = changedfiles.keys() | |
|
1928 | changedfiles.sort() | |
|
1929 | 1914 | |
|
1930 | 1915 | mnfst = self.manifest |
|
1931 | 1916 | nodeiter = gennodelst(mnfst) |
|
1932 | 1917 | for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)): |
|
1933 | 1918 | yield chnk |
|
1934 | 1919 | |
|
1935 | for fname in changedfiles: | |
|
1920 | for fname in util.sort(changedfiles): | |
|
1936 | 1921 | filerevlog = self.file(fname) |
|
1937 | 1922 | if not len(filerevlog): |
|
1938 | 1923 | raise util.Abort(_("empty or missing revlog for %s") % fname) |
@@ -8,7 +8,7 b'' | |||
|
8 | 8 | from node import bin, hex, nullid |
|
9 | 9 | from revlog import revlog, RevlogError |
|
10 | 10 | from i18n import _ |
|
11 | import array, struct, mdiff, parsers | |
|
11 | import array, struct, mdiff, parsers, util | |
|
12 | 12 | |
|
13 | 13 | class manifestdict(dict): |
|
14 | 14 | def __init__(self, mapping=None, flags=None): |
@@ -126,9 +126,7 b' class manifest(revlog):' | |||
|
126 | 126 | # if we're using the listcache, make sure it is valid and |
|
127 | 127 | # parented by the same node we're diffing against |
|
128 | 128 | if not (changed and self.listcache and p1 and self.mapcache[0] == p1): |
|
129 | files = map.keys() | |
|
130 | files.sort() | |
|
131 | ||
|
129 | files = util.sort(map) | |
|
132 | 130 | for f in files: |
|
133 | 131 | checkforbidden(f) |
|
134 | 132 |
@@ -262,11 +262,10 b' def applyupdates(repo, action, wctx, mct' | |||
|
262 | 262 | "apply the merge action list to the working directory" |
|
263 | 263 | |
|
264 | 264 | updated, merged, removed, unresolved = 0, 0, 0, 0 |
|
265 | action.sort() | |
|
266 | ||
|
267 | 265 | ms = mergestate(repo) |
|
268 | 266 | ms.reset(wctx.parents()[0].node()) |
|
269 | 267 | moves = [] |
|
268 | action.sort() | |
|
270 | 269 | |
|
271 | 270 | # prescan for merges |
|
272 | 271 | for a in action: |
@@ -1094,8 +1094,7 b' def updatedir(ui, repo, patches):' | |||
|
1094 | 1094 | repo.copy(src, dst) |
|
1095 | 1095 | removes = removes.keys() |
|
1096 | 1096 | if removes: |
|
1097 | removes.sort() | |
|
1098 | repo.remove(removes, True) | |
|
1097 | repo.remove(util.sort(removes), True) | |
|
1099 | 1098 | for f in patches: |
|
1100 | 1099 | ctype, gp = patches[f] |
|
1101 | 1100 | if gp and gp.mode: |
@@ -1113,9 +1112,7 b' def updatedir(ui, repo, patches):' | |||
|
1113 | 1112 | cmdutil.addremove(repo, cfiles) |
|
1114 | 1113 | files = patches.keys() |
|
1115 | 1114 | files.extend([r for r in removes if r not in files]) |
|
1116 |
|
|
|
1117 | ||
|
1118 | return files | |
|
1115 | return util.sort(files) | |
|
1119 | 1116 | |
|
1120 | 1117 | def b85diff(to, tn): |
|
1121 | 1118 | '''print base85-encoded binary diff''' |
@@ -1208,13 +1205,10 b' def diff(repo, node1=None, node2=None, m' | |||
|
1208 | 1205 | for k, v in copy.items(): |
|
1209 | 1206 | copy[v] = k |
|
1210 | 1207 | |
|
1211 | all = modified + added + removed | |
|
1212 | all.sort() | |
|
1213 | 1208 | gone = {} |
|
1214 | ||
|
1215 | 1209 | gitmode = {'l': '120000', 'x': '100755', '': '100644'} |
|
1216 | 1210 | |
|
1217 | for f in all: | |
|
1211 | for f in util.sort(modified + added + removed): | |
|
1218 | 1212 | to = None |
|
1219 | 1213 | tn = None |
|
1220 | 1214 | dodiff = True |
@@ -34,8 +34,7 b' def walkrepo(root):' | |||
|
34 | 34 | for x in walk(os.path.join(root, 'data'), True): |
|
35 | 35 | yield x |
|
36 | 36 | # write manifest before changelog |
|
37 | meta = list(walk(root, False)) | |
|
38 | meta.sort() | |
|
37 | meta = util.sort(walk(root, False)) | |
|
39 | 38 | meta.reverse() |
|
40 | 39 | for x in meta: |
|
41 | 40 | yield x |
@@ -312,15 +312,11 b' class ui(object):' | |||
|
312 | 312 | items = self._configitems(section, untrusted=untrusted, abort=True) |
|
313 | 313 | if self.debugflag and not untrusted and self.ucdata: |
|
314 | 314 | uitems = self._configitems(section, untrusted=True, abort=False) |
|
315 | keys = uitems.keys() | |
|
316 | keys.sort() | |
|
317 | for k in keys: | |
|
315 | for k in util.sort(uitems): | |
|
318 | 316 | if uitems[k] != items.get(k): |
|
319 | 317 | self.warn(_("Ignoring untrusted configuration option " |
|
320 | 318 | "%s.%s = %s\n") % (section, k, uitems[k])) |
|
321 | x = items.items() | |
|
322 | x.sort() | |
|
323 | return x | |
|
319 | return util.sort(items.items()) | |
|
324 | 320 | |
|
325 | 321 | def walkconfig(self, untrusted=False): |
|
326 | 322 | cdata = self._get_cdata(untrusted) |
@@ -252,6 +252,12 b' def unique(g):' | |||
|
252 | 252 | """return the uniq elements of iterable g""" |
|
253 | 253 | return dict.fromkeys(g).keys() |
|
254 | 254 | |
|
255 | def sort(l): | |
|
256 | if not isinstance(l, list): | |
|
257 | l = list(l) | |
|
258 | l.sort() | |
|
259 | return l | |
|
260 | ||
|
255 | 261 | class Abort(Exception): |
|
256 | 262 | """Raised if a command needs to print an error and exit.""" |
|
257 | 263 |
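
The sort() helper added in the hunk above is the basis for every other hunk in this changeset: it accepts any iterable, copies it into a list when it is not one already, sorts it in place, and returns it, so call sites can replace the old keys()/sort()/iterate idiom with a single expression. A minimal standalone sketch of that behaviour follows; the dict d and its contents are illustrative, not taken from the changeset:

    def sort(l):
        # mirrors the util.sort added above: copy non-lists into a list,
        # sort in place, and hand the sorted list back to the caller
        if not isinstance(l, list):
            l = list(l)
        l.sort()
        return l

    d = {'b': 2, 'a': 1}
    assert sort(d) == ['a', 'b']           # iterating a dict yields its keys
    assert sort([3, 1, 2]) == [1, 2, 3]    # lists are sorted in place and returned

Note that when the argument is already a list, the caller's list is sorted in place rather than copied, which matches the .sort() calls the converted hunks replace.
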
@@ -7,7 +7,7 b'' | |||
|
7 | 7 | |
|
8 | 8 | from node import nullid, short |
|
9 | 9 | from i18n import _ |
|
10 | import revlog | |
|
10 | import revlog, util | |
|
11 | 11 | |
|
12 | 12 | def verify(repo): |
|
13 | 13 | lock = repo.lock() |
@@ -139,38 +139,26 b' def _verify(repo):' | |||
|
139 | 139 | ui.status(_("crosschecking files in changesets and manifests\n")) |
|
140 | 140 | |
|
141 | 141 | if havemf: |
|
142 | nm = [] | |
|
143 | for m in mflinkrevs: | |
|
144 | for c in mflinkrevs[m]: | |
|
145 | nm.append((c, m)) | |
|
146 | nm.sort() | |
|
147 | for c, m in nm: | |
|
142 | for c, m in util.sort([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]): | |
|
148 | 143 | err(c, _("changeset refers to unknown manifest %s") % short(m)) |
|
149 |
del mflinkrevs |
|
|
144 | del mflinkrevs | |
|
150 | 145 | |
|
151 | fl = filelinkrevs.keys() | |
|
152 | fl.sort() | |
|
153 | for f in fl: | |
|
146 | for f in util.sort(filelinkrevs): | |
|
154 | 147 | if f not in filenodes: |
|
155 | 148 | lr = filelinkrevs[f][0] |
|
156 | 149 | err(lr, _("in changeset but not in manifest"), f) |
|
157 | del fl | |
|
158 | 150 | |
|
159 | 151 | if havecl: |
|
160 | fl = filenodes.keys() | |
|
161 | fl.sort() | |
|
162 | for f in fl: | |
|
152 | for f in util.sort(filenodes): | |
|
163 | 153 | if f not in filelinkrevs: |
|
164 | 154 | try: |
|
165 | 155 | lr = min([repo.file(f).linkrev(n) for n in filenodes[f]]) |
|
166 | 156 | except: |
|
167 | 157 | lr = None |
|
168 | 158 | err(lr, _("in manifest but not in changeset"), f) |
|
169 | del fl | |
|
170 | 159 | |
|
171 | 160 | ui.status(_("checking files\n")) |
|
172 | files = util.unique(filenodes.keys() + filelinkrevs.keys()) | |
|
173 | files.sort() | |
|
161 | files = util.sort(util.unique(filenodes.keys() + filelinkrevs.keys())) | |
|
174 | 162 | for f in files: |
|
175 | 163 | fl = repo.file(f) |
|
176 | 164 | checklog(fl, f) |
@@ -214,8 +202,7 b' def _verify(repo):' | |||
|
214 | 202 | # cross-check |
|
215 | 203 | if f in filenodes: |
|
216 | 204 | fns = [(mf.linkrev(l), n) for n,l in filenodes[f].items()] |
|
217 | fns.sort() | |
|
218 | for lr, node in fns: | |
|
205 | for lr, node in util.sort(fns): | |
|
219 | 206 | err(lr, _("%s in manifests not found") % short(node), f) |
|
220 | 207 | |
|
221 | 208 | ui.status(_("%d files, %d changesets, %d total revisions\n") % |