cleanup: delete lots of unused local variables...
Martin von Zweigbergk
r41401:876494fd default
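
Every hunk below follows the same pattern: a local name is bound and then either never read, or unconditionally rebound before its first use, so the binding can simply be deleted. A minimal before/after sketch of the pattern (illustrative code, not taken from this commit):

    import os

    # Before: 'mode' is a dead store (rebound before any read), and the
    # exception is bound to a name, 'inst', that is never used.
    def filemode_before(path):
        mode = ''
        try:
            mode = oct(os.stat(path).st_mode)
        except OSError as inst:
            return None
        return mode

    # After: drop the dead initializer and the unused exception binding.
    def filemode_after(path):
        try:
            return oct(os.stat(path).st_mode)
        except OSError:
            return None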
@@ -25,7 +25,7 @@ def reducetest(a, b):
 
         try:
             test1(a, b)
-        except Exception as inst:
+        except Exception:
             reductions += 1
             tries = 0
             a = a2
@@ -610,7 +610,7 @@ def checkfile(f, logfunc=_defaultlogger.
     try:
         with opentext(f) as fp:
             try:
-                pre = post = fp.read()
+                pre = fp.read()
             except UnicodeDecodeError as e:
                 print("%s while reading %s" % (e, f))
                 return result
@@ -911,9 +911,7 @@ def perfphasesremote(ui, repo, dest=None
         raise error.Abort((b'default repository not configured!'),
                           hint=(b"see 'hg help config.paths'"))
     dest = path.pushloc or path.loc
-    branches = (path.branch, opts.get(b'branch') or [])
     ui.status((b'analysing phase of %s\n') % util.hidepassword(dest))
-    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
     other = hg.peer(repo, opts, dest)
 
     # easier to perform discovery through the operation
@@ -450,7 +450,6 @@ def synthesize(ui, repo, descpath, **opt
             path = fctx.path()
             changes[path] = '\n'.join(lines) + '\n'
         for __ in xrange(pick(filesremoved)):
-            path = random.choice(mfk)
             for __ in xrange(10):
                 path = random.choice(mfk)
                 if path not in changes:
@@ -726,7 +726,6 @@ class fixupstate(object):
                 # nothing changed, nothing commited
                 nextp1 = ctx
                 continue
-            msg = ''
             if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                 # changeset is no longer necessary
                 self.replacemap[ctx.node()] = None
@@ -118,7 +118,6 @@ class blackboxlogger(object):
         date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
         user = procutil.getuser()
         pid = '%d' % procutil.getpid()
-        rev = '(unknown)'
         changed = ''
         ctx = self._repo[None]
         parents = ctx.parents()
@@ -76,7 +76,6 @@ class convert_cvs(converter_source):
         d = encoding.getcwd()
         try:
             os.chdir(self.path)
-            id = None
 
             cache = 'update'
             if not self.ui.configbool('convert', 'cvsps.cache'):
@@ -219,7 +218,7 @@ class convert_cvs(converter_source):
         if "UseUnchanged" in r:
             self.writep.write("UseUnchanged\n")
         self.writep.flush()
-        r = self.readp.readline()
+        self.readp.readline()
 
     def getheads(self):
         self._parse()
@@ -105,10 +105,6 @@ class mercurial_sink(common.converter_si
         if not branch:
             branch = 'default'
         pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
-        if pbranches:
-            pbranch = pbranches[0][1]
-        else:
-            pbranch = 'default'
 
         branchpath = os.path.join(self.path, branch)
         if setbranch:
@@ -335,7 +335,6 @@ class monotone_source(common.converter_s
 
     def before(self):
        # Check if we have a new enough version to use automate stdio
-       version = 0.0
        try:
            versionstr = self.mtnrunsingle("interface_version")
            version = float(versionstr)
@@ -984,7 +984,6 @@ class svn_source(converter_source):
         # TODO: ra.get_file transmits the whole file instead of diffs.
         if file in self.removed:
             return None, None
-        mode = ''
         try:
             new_module, revnum = revsplit(rev)[1:]
             if self.module != new_module:
@@ -71,7 +71,6 @@ def _getannotate(repo, proto, path, last
     for p in [actx.revmappath, actx.linelogpath]:
         if not os.path.exists(p):
             continue
-        content = ''
         with open(p, 'rb') as f:
             content = f.read()
         vfsbaselen = len(repo.vfs.base + '/')
@@ -109,7 +109,6 @@ def _fctxannotate(orig, self, follow=Fal
 
 def _remotefctxannotate(orig, self, follow=False, skiprevs=None, diffopts=None):
     # skipset: a set-like used to test if a fctx needs to be downloaded
-    skipset = None
     with context.fctxannotatecontext(self, follow, diffopts) as ac:
         skipset = revmap.revmap(ac.revmappath)
     return orig(self, follow, skiprevs=skiprevs, diffopts=diffopts,
@@ -240,24 +240,6 @@ def overridewalk(orig, self, match, subr
         clock = 'c:0:0'
         notefiles = []
 
-    def fwarn(f, msg):
-        self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
-        return False
-
-    def badtype(mode):
-        kind = _('unknown')
-        if stat.S_ISCHR(mode):
-            kind = _('character device')
-        elif stat.S_ISBLK(mode):
-            kind = _('block device')
-        elif stat.S_ISFIFO(mode):
-            kind = _('fifo')
-        elif stat.S_ISSOCK(mode):
-            kind = _('socket')
-        elif stat.S_ISDIR(mode):
-            kind = _('directory')
-        return _('unsupported file type (type is %s)') % kind
-
     ignore = self._ignore
     dirignore = self._dirignore
     if unknown:
@@ -62,7 +62,6 @@ def synthesize(vers, opts):
     vers['capabilities'] = {}
     for name in opts['optional']:
         vers['capabilities'][name] = check(parsed_version, name)
-    failed = False
     for name in opts['required']:
         have = check(parsed_version, name)
         vers['capabilities'][name] = have
@@ -267,7 +267,7 @@ class _BunserDict(object):
             key = key[3:]
         try:
             return self._values[self._keys.index(key)]
-        except ValueError as ex:
+        except ValueError:
             raise KeyError('_BunserDict has no key %s' % key)
 
     def __len__(self):
@@ -420,7 +420,6 @@ class Bunser(object):
 
 
 def _pdu_info_helper(buf):
-    bser_version = -1
     if buf[0:2] == EMPTY_HEADER[0:2]:
         bser_version = 1
         bser_capabilities = 0
@@ -83,7 +83,6 @@ def parseoptions(ui, cmdoptions, args):
             args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
             break
         except getopt.GetoptError as ex:
-            flag = None
             if "requires argument" in ex.msg:
                 raise
             if ('--' + ex.opt) in ex.msg:
@@ -348,7 +348,6 @@ class journalstorage(object):
 
     def _write(self, vfs, entry):
         with self.jlock(vfs):
-            version = None
             # open file in amend mode to ensure it is created if missing
             with vfs('namejournal', mode='a+b') as f:
                 f.seek(0, os.SEEK_SET)
@@ -210,8 +210,6 @@ def removelargefiles(ui, repo, isaddremo
             ui.warn(msg % m.rel(f))
         return int(len(files) > 0)
 
-    result = 0
-
     if after:
         remove = deleted
         result = warn(modified + added + clean,
@@ -43,7 +43,6 @@ def openstore(repo=None, remote=None, pu
         path, _branches = hg.parseurl(path)
         remote = hg.peer(repo or ui, {}, path)
     elif path == 'default-push' or path == 'default':
-        path = ''
         remote = repo
     else:
         path, _branches = hg.parseurl(path)
@@ -277,7 +277,6 @@ def getoldnodedrevmap(repo, nodelist):
     The ``old node``, if not None, is guaranteed to be the last diff of
     corresponding Differential Revision, and exist in the repo.
     """
-    url, token = readurltoken(repo)
     unfi = repo.unfiltered()
     nodemap = unfi.changelog.nodemap
 
@@ -1804,7 +1804,6 @@ def clearrebased(ui, repo, destmap, stat
 
 def pullrebase(orig, ui, repo, *args, **opts):
     'Call rebase after pull if the latter has been invoked with --rebase'
-    ret = None
     if opts.get(r'rebase'):
         if ui.configbool('commands', 'rebase.requiredest'):
             msg = _('rebase destination required by configuration')
@@ -457,8 +457,6 @@ class mutablebasepack(versionmixin):
         pass
 
     def writeindex(self):
-        rawindex = ''
-
         largefanout = len(self.entries) > SMALLFANOUTCUTOFF
         if largefanout:
             params = indexparams(LARGEFANOUTPREFIX, self.VERSION)
@@ -250,10 +250,8 @@ class datapack(basepack.basepack):
                 break
             if node > midnode:
                 start = mid
-                startnode = midnode
             elif node < midnode:
                 end = mid
-                endnode = midnode
             else:
                 return None
 
@@ -175,7 +175,6 @@ def _decompressblob(raw):
     return zlib.decompress(raw)
 
 def parsefileblob(path, decompress):
-    raw = None
     f = open(path, "rb")
     try:
         raw = f.read()
@@ -259,10 +259,8 @@ class historypack(basepack.basepack):
                 return self._index[mid:mid + entrylen]
             if node > midnode:
                 start = mid
-                startnode = midnode
             elif node < midnode:
                 end = mid
-                endnode = midnode
         return None
 
     def markledger(self, ledger, options=None):
@@ -514,7 +512,6 @@ class mutablehistorypack(basepack.mutabl
 
             fileindexentries.append(rawentry)
 
-        nodecountraw = ''
         nodecountraw = struct.pack('!Q', nodecount)
         return (''.join(fileindexentries) + nodecountraw +
                 ''.join(nodeindexentries))
@@ -61,8 +61,6 @@ class remotefilelog(object):
         return t[s + 2:]
 
     def add(self, text, meta, transaction, linknode, p1=None, p2=None):
-        hashtext = text
-
         # hash with the metadata, like in vanilla filelogs
         hashtext = shallowutil.createrevlogtext(text, meta.get('copy'),
                                                 meta.get('copyrev'))
@@ -601,7 +601,6 @@ class repacker(object):
             # TODO: Optimize the deltachain fetching. Since we're
             # iterating over the different version of the file, we may
             # be fetching the same deltachain over and over again.
-            meta = None
             if deltabase != nullid:
                 deltaentry = self.data.getdelta(filename, node)
                 delta, deltabasename, origdeltabase, meta = deltaentry
@@ -938,7 +938,6 @@ class Listener(object):
         self.zeroconf.engine.addReader(self, self.zeroconf.socket)
 
     def handle_read(self):
-        data = addr = port = None
         sock = self.zeroconf.socket
         try:
             data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE)
@@ -1230,7 +1229,6 @@ class ServiceInfo(object):
         delay = _LISTENER_TIME
         next = now + delay
         last = now + timeout
-        result = 0
         try:
             zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY,
                                                    _CLASS_IN))
@@ -306,7 +306,6 @@ def _readactive(repo, marks):
     itself as we commit. This function returns the name of that bookmark.
     It is stored in .hg/bookmarks.current
     """
-    mark = None
     try:
         file = repo.vfs('bookmarks.current')
     except IOError as inst:
@@ -275,7 +275,7 @@ class cg1unpacker(object):
         # because we need to use the top level value (if they exist)
         # in this function.
         srctype = tr.hookargs.setdefault('source', srctype)
-        url = tr.hookargs.setdefault('url', url)
+        tr.hookargs.setdefault('url', url)
         repo.hook('prechangegroup',
                   throw=True, **pycompat.strkwargs(tr.hookargs))
 
@@ -1251,10 +1251,6 @@ def copy(ui, repo, pats, opts, rename=Fa
         else:
             ui.warn(_('%s: cannot copy - %s\n') %
                     (relsrc, encoding.strtolocal(inst.strerror)))
-            if rename:
-                hint = _("('hg rename --after' to record the rename)\n")
-            else:
-                hint = _("('hg copy --after' to record the copy)\n")
             return True # report a failure
 
     if ui.verbose or not exact:
@@ -169,7 +169,7 @@ def _terminfosetup(ui, mode, formatted):
             ui._terminfoparams[key[9:]] = newval
     try:
         curses.setupterm()
-    except curses.error as e:
+    except curses.error:
         ui._terminfoparams.clear()
         return
 
@@ -2633,7 +2633,6 @@ def _abortgraft(ui, repo, graftstate):
         raise error.Abort(_("cannot abort using an old graftstate"))
 
     # changeset from which graft operation was started
-    startctx = None
     if len(newnodes) > 0:
         startctx = repo[newnodes[0]].p1()
     else:
@@ -5503,7 +5502,6 @@ def summary(ui, repo, **opts):
     pnode = parents[0].node()
     marks = []
 
-    ms = None
     try:
         ms = mergemod.mergestate.read(repo)
     except error.UnsupportedMergeRecords as e:
@@ -5908,7 +5906,6 @@ def tags(ui, repo, **opts):
     ui.pager('tags')
     fm = ui.formatter('tags', opts)
     hexfunc = fm.hexfunc
-    tagtype = ""
 
     for t, n in reversed(repo.tagslist()):
         hn = hexfunc(n)
@@ -745,7 +745,6 @@ def debugstate(ui, repo, **opts):
         nodates = True
     datesort = opts.get(r'datesort')
 
-    timestr = ""
     if datesort:
         keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
     else:
@@ -1182,13 +1181,6 @@ def debuginstall(ui, **opts):
     '''
     opts = pycompat.byteskwargs(opts)
 
-    def writetemp(contents):
-        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
-        f = os.fdopen(fd, r"wb")
-        f.write(contents)
-        f.close()
-        return name
-
     problems = 0
 
     fm = ui.formatter('debuginstall', opts)
@@ -2569,7 +2561,6 @@ def debugssl(ui, repo, source=None, **op
 
     source, branches = hg.parseurl(ui.expandpath(source))
     url = util.url(source)
-    addr = None
 
     defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
@@ -297,7 +297,6 @@ def getbundlespec(ui, fh):
                                         'client'))
             elif part.type == 'stream2' and version is None:
                 # A stream2 part requires to be part of a v2 bundle
-                version = "v2"
                 requirements = urlreq.unquote(part.params['requirements'])
                 splitted = requirements.split()
                 params = bundle2._formatrequirementsparams(splitted)
@@ -462,7 +462,6 @@ def _imergeauto(repo, mynode, orig, fcd,
     Generic driver for _imergelocal and _imergeother
     """
     assert localorother is not None
-    tool, toolpath, binary, symlink, scriptfn = toolconf
     r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
                                 localorother=localorother)
     return True, r
@@ -282,7 +282,7 @@ def unshare(ui, repo):
     called.
     """
 
-    destlock = lock = None
+    destlock = None
     lock = repo.lock()
     try:
         # we use locks here because if we race with commit, we
@@ -143,7 +143,7 @@ def rawindexentries(ui, repos, req, subd
             path = path[:-len(discarded) - 1]
 
             try:
-                r = hg.repository(ui, path)
+                hg.repository(ui, path)
                 directory = False
             except (IOError, error.RepoError):
                 pass
@@ -2539,7 +2539,7 @@ class localrepository(object):
                         m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                                 trp, changed)
                         m.setflag(f, fctx.flags())
-                    except OSError as inst:
+                    except OSError:
                         self.ui.warn(_("trouble committing %s!\n") % f)
                         raise
                     except IOError as inst:
@@ -97,7 +97,6 @@ def saveremotenames(repo, remotepath, br
 
 def activepath(repo, remote):
     """returns remote path"""
-    local = None
     # is the remote a local peer
     local = remote.local()
 
@@ -283,7 +283,6 @@ class _lazymanifest(object):
         if len(self.extradata) == 0:
             return
         l = []
-        last_cut = 0
         i = 0
         offset = 0
         self.extrainfo = [0] * len(self.positions)
@@ -1186,9 +1186,6 @@ def manifestmerge(repo, wctx, p2, pa, br
 
     diff = m1.diff(m2, match=matcher)
 
-    if matcher is None:
-        matcher = matchmod.always('', '')
-
     actions = {}
     for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
         if n1 and n2: # file exists on both local and remote side
@@ -641,7 +641,6 @@ def formathtml(blocks):
 
 def parse(text, indent=0, keep=None, admonitions=None):
     """Parse text into a list of blocks"""
-    pruned = []
     blocks = findblocks(text)
     for b in blocks:
         b['indent'] += indent
@@ -736,7 +735,6 @@ def _getsections(blocks):
     '''return a list of (section path, nesting level, blocks) tuples'''
     nest = ""
     names = ()
-    level = 0
     secs = []
 
     def getname(b):
@@ -1448,7 +1448,6 @@ class binhunk(object):
             hunk.append(l)
             return l.rstrip('\r\n')
 
-        size = 0
         while True:
            line = getline(lr, self.hunk)
            if not line:
@@ -1903,7 +1902,6 @@ def iterhunks(fp):
             if not gitpatches:
                 raise PatchError(_('failed to synchronize metadata for "%s"')
                                  % afile[2:])
-            gp = gitpatches[-1]
             newfile = True
         elif x.startswith('---'):
             # check for a unified diff
@@ -2353,7 +2351,6 @@ def diffhunks(repo, node1=None, node2=No
         modified = filterrel(modified)
         added = filterrel(added)
         removed = filterrel(removed)
-        relfiltered = True
         # filter out copies where either side isn't inside the relative root
         copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                      if dst.startswith(relroot)
@@ -816,9 +816,6 @@ def write_to_chrome(data, fp, minthresho
         id2stack[-1].update(parent=parent)
         return myid
 
-    def endswith(a, b):
-        return list(a)[-len(b):] == list(b)
-
     # The sampling profiler can sample multiple times without
     # advancing the clock, potentially causing the Chrome trace viewer
     # to render single-pixel columns that we cannot zoom in on. We
@@ -858,9 +855,6 @@ def write_to_chrome(data, fp, minthresho
     # events given only stack snapshots.
 
     for sample in data.samples:
-        tos = sample.stack[0]
-        name = tos.function
-        path = simplifypath(tos.path)
         stack = tuple((('%s:%d' % (simplifypath(frame.path), frame.lineno),
                         frame.function) for frame in sample.stack))
         qstack = collections.deque(stack)
@@ -145,7 +145,6 @@ def submerge(repo, wctx, mctx, actx, ove
 
     promptssrc = filemerge.partextras(labels)
     for s, l in sorted(s1.iteritems()):
-        prompts = None
         a = sa.get(s, nullstate)
         ld = l # local state with possible dirty flag for compares
         if wctx.sub(s).dirty():
@@ -218,7 +217,6 @@ def submerge(repo, wctx, mctx, actx, ove
             wctx.sub(s).remove()
 
     for s, r in sorted(s2.items()):
-        prompts = None
         if s in s1:
             continue
         elif s not in sa:
@@ -89,7 +89,7 @@ def _playback(journal, report, opener, v
             except (IOError, OSError) as inst:
                 if inst.errno != errno.ENOENT:
                     raise
-        except (IOError, OSError, error.Abort) as inst:
+        except (IOError, OSError, error.Abort):
             if not c:
                 raise
 
@@ -101,7 +101,7 @@ def _playback(journal, report, opener, v
             for f in backupfiles:
                 if opener.exists(f):
                     opener.unlink(f)
-        except (IOError, OSError, error.Abort) as inst:
+        except (IOError, OSError, error.Abort):
             # only pure backup file remains, it is sage to ignore any error
             pass
 
@@ -566,8 +566,6 @@ class ui(object):
             candidate = self._data(untrusted).get(s, n, None)
             if candidate is not None:
                 value = candidate
-                section = s
-                name = n
                 break
 
         if self.debugflag and not untrusted and self._reportuntrusted:
@@ -424,8 +424,6 @@ def getbundle(repo, proto, others):
         raise error.Abort(bundle2requiredmain,
                           hint=bundle2requiredhint)
 
-    prefercompressed = True
-
     try:
         clheads = set(repo.changelog.heads())
         heads = set(opts.get('heads', set()))
@@ -578,7 +576,6 @@ def unbundle(repo, proto, heads):
             repo.ui.debug('redirecting incoming bundle to %s\n' %
                           tempname)
             fp = os.fdopen(fd, pycompat.sysstr('wb+'))
-            r = 0
             for p in payload:
                 fp.write(p)
             fp.seek(0)
@@ -1225,7 +1225,6 @@ class Test(unittest.TestCase):
             killdaemons(env['DAEMON_PIDS'])
             return ret
 
-        output = b''
         proc.tochild.close()
 
         try:
@@ -123,7 +123,6 @@ def test_missingancestors(seed, rng):
         # reference slow algorithm
         naiveinc = naiveincrementalmissingancestors(ancs, bases)
         seq = []
-        revs = []
         for _ in xrange(inccount):
             if rng.random() < 0.2:
                 newbases = samplerevs(graphnodes)
@@ -15,7 +15,6 @@ maxdeltab = 10
 def _genedits(seed, endrev):
     lines = []
     random.seed(seed)
-    rev = 0
     for rev in range(0, endrev):
         n = len(lines)
         a1 = random.randint(0, n)
@@ -161,7 +161,7 @@ class histpacktests(unittest.TestCase):
         pack = self.createPack(revisions)
 
         # Verify the pack contents
-        for (filename, node), (p1, p2, lastnode) in allentries.items():
+        for (filename, node) in allentries:
             ancestors = pack.getancestors(filename, node)
             self.assertEqual(ancestorcounts[(filename, node)],
                              len(ancestors))
@@ -417,7 +417,6 @@ def findsnapshottest(rlog):
     print(' got: %s' % result15)
 
 def maintest():
-    expected = rl = None
     with newtransaction() as tr:
         rl = newrevlog(recreate=True)
         expected = writecases(rl, tr)
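
Dead stores like these are mechanically detectable: each deletion above corresponds to a warning of the form "local variable 'x' is assigned to but never used". A sketch of how such a sweep could be driven with pyflakes (assuming the pyflakes package is installed; the target paths are illustrative):

    import sys

    from pyflakes.api import checkRecursive
    from pyflakes.reporter import Reporter

    # Recursively check the source trees; each warning is printed to stdout.
    warnings = checkRecursive(['mercurial', 'hgext', 'contrib', 'tests'],
                              Reporter(sys.stdout, sys.stderr))
    sys.exit(1 if warnings else 0)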