cleanup: delete lots of unused local variables...
Martin von Zweigbergk
r41401:876494fd default
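Every hunk in the diff below follows the same pattern: a local variable is bound but the binding is never read, either because nothing in the scope uses the name at all, or because every use is preceded by an unconditional rebinding. A minimal sketch of both kinds (hypothetical code, not from this commit):

    def parse(text):
        pruned = []        # never read anywhere in the function: dead binding
        blocks = text.splitlines()
        return blocks

    def handle_read(sock):
        data = addr = port = None    # always rebound before any read: dead store
        data, (addr, port) = sock.recvfrom(8192)
        return data, addr, port

The first kind is what a linter flags; the second is a defensive pre-initialization that turns out to be unnecessary once every code path is checked.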
@@ -25,7 +25,7 @@ def reducetest(a, b):

         try:
             test1(a, b)
-        except Exception as inst:
+        except Exception:
             reductions += 1
             tries = 0
             a = a2
@@ -610,7 +610,7 @@ def checkfile(f, logfunc=_defaultlogger.
     try:
         with opentext(f) as fp:
             try:
-                pre = post = fp.read()
+                pre = fp.read()
             except UnicodeDecodeError as e:
                 print("%s while reading %s" % (e, f))
                 return result
@@ -911,9 +911,7 @@ def perfphasesremote(ui, repo, dest=None
         raise error.Abort((b'default repository not configured!'),
                           hint=(b"see 'hg help config.paths'"))
     dest = path.pushloc or path.loc
-    branches = (path.branch, opts.get(b'branch') or [])
     ui.status((b'analysing phase of %s\n') % util.hidepassword(dest))
-    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
     other = hg.peer(repo, opts, dest)

     # easier to perform discovery through the operation
@@ -450,7 +450,6 @@ def synthesize(ui, repo, descpath, **opt
             path = fctx.path()
             changes[path] = '\n'.join(lines) + '\n'
         for __ in xrange(pick(filesremoved)):
-            path = random.choice(mfk)
             for __ in xrange(10):
                 path = random.choice(mfk)
                 if path not in changes:
@@ -726,7 +726,6 @@ class fixupstate(object):
                 # nothing changed, nothing commited
                 nextp1 = ctx
                 continue
-            msg = ''
             if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                 # changeset is no longer necessary
                 self.replacemap[ctx.node()] = None
@@ -118,7 +118,6 @@ class blackboxlogger(object):
         date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
         user = procutil.getuser()
         pid = '%d' % procutil.getpid()
-        rev = '(unknown)'
         changed = ''
         ctx = self._repo[None]
         parents = ctx.parents()
@@ -76,7 +76,6 @@ class convert_cvs(converter_source):
         d = encoding.getcwd()
         try:
             os.chdir(self.path)
-            id = None

             cache = 'update'
             if not self.ui.configbool('convert', 'cvsps.cache'):
@@ -219,7 +218,7 @@ class convert_cvs(converter_source):
             if "UseUnchanged" in r:
                 self.writep.write("UseUnchanged\n")
             self.writep.flush()
-            r = self.readp.readline()
+            self.readp.readline()

     def getheads(self):
         self._parse()
@@ -105,10 +105,6 @@ class mercurial_sink(common.converter_si
         if not branch:
             branch = 'default'
         pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
-        if pbranches:
-            pbranch = pbranches[0][1]
-        else:
-            pbranch = 'default'

         branchpath = os.path.join(self.path, branch)
         if setbranch:
@@ -335,7 +335,6 @@ class monotone_source(common.converter_s

     def before(self):
         # Check if we have a new enough version to use automate stdio
-        version = 0.0
         try:
             versionstr = self.mtnrunsingle("interface_version")
             version = float(versionstr)
@@ -984,7 +984,6 @@ class svn_source(converter_source):
         # TODO: ra.get_file transmits the whole file instead of diffs.
         if file in self.removed:
             return None, None
-        mode = ''
         try:
             new_module, revnum = revsplit(rev)[1:]
             if self.module != new_module:
@@ -71,7 +71,6 @@ def _getannotate(repo, proto, path, last
     for p in [actx.revmappath, actx.linelogpath]:
         if not os.path.exists(p):
             continue
-        content = ''
         with open(p, 'rb') as f:
             content = f.read()
     vfsbaselen = len(repo.vfs.base + '/')
@@ -109,7 +109,6 @@ def _fctxannotate(orig, self, follow=Fal

 def _remotefctxannotate(orig, self, follow=False, skiprevs=None, diffopts=None):
     # skipset: a set-like used to test if a fctx needs to be downloaded
-    skipset = None
     with context.fctxannotatecontext(self, follow, diffopts) as ac:
         skipset = revmap.revmap(ac.revmappath)
     return orig(self, follow, skiprevs=skiprevs, diffopts=diffopts,
@@ -240,24 +240,6 @@ def overridewalk(orig, self, match, subr
         clock = 'c:0:0'
         notefiles = []

-    def fwarn(f, msg):
-        self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
-        return False
-
-    def badtype(mode):
-        kind = _('unknown')
-        if stat.S_ISCHR(mode):
-            kind = _('character device')
-        elif stat.S_ISBLK(mode):
-            kind = _('block device')
-        elif stat.S_ISFIFO(mode):
-            kind = _('fifo')
-        elif stat.S_ISSOCK(mode):
-            kind = _('socket')
-        elif stat.S_ISDIR(mode):
-            kind = _('directory')
-        return _('unsupported file type (type is %s)') % kind
-
     ignore = self._ignore
     dirignore = self._dirignore
     if unknown:
@@ -62,7 +62,6 @@ def synthesize(vers, opts):
     vers['capabilities'] = {}
     for name in opts['optional']:
         vers['capabilities'][name] = check(parsed_version, name)
-    failed = False
     for name in opts['required']:
         have = check(parsed_version, name)
         vers['capabilities'][name] = have
@@ -267,7 +267,7 @@ class _BunserDict(object):
             key = key[3:]
         try:
             return self._values[self._keys.index(key)]
-        except ValueError as ex:
+        except ValueError:
             raise KeyError('_BunserDict has no key %s' % key)

     def __len__(self):
@@ -420,7 +420,6 @@ class Bunser(object):


 def _pdu_info_helper(buf):
-    bser_version = -1
     if buf[0:2] == EMPTY_HEADER[0:2]:
         bser_version = 1
         bser_capabilities = 0
@@ -83,7 +83,6 @@ def parseoptions(ui, cmdoptions, args):
             args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
             break
         except getopt.GetoptError as ex:
-            flag = None
             if "requires argument" in ex.msg:
                 raise
             if ('--' + ex.opt) in ex.msg:
@@ -348,7 +348,6 @@ class journalstorage(object):

     def _write(self, vfs, entry):
         with self.jlock(vfs):
-            version = None
             # open file in amend mode to ensure it is created if missing
             with vfs('namejournal', mode='a+b') as f:
                 f.seek(0, os.SEEK_SET)
@@ -210,8 +210,6 @@ def removelargefiles(ui, repo, isaddremo
             ui.warn(msg % m.rel(f))
         return int(len(files) > 0)

-    result = 0
-
     if after:
         remove = deleted
         result = warn(modified + added + clean,
@@ -43,7 +43,6 @@ def openstore(repo=None, remote=None, pu
         path, _branches = hg.parseurl(path)
         remote = hg.peer(repo or ui, {}, path)
     elif path == 'default-push' or path == 'default':
-        path = ''
         remote = repo
     else:
         path, _branches = hg.parseurl(path)
@@ -277,7 +277,6 @@ def getoldnodedrevmap(repo, nodelist):
     The ``old node``, if not None, is guaranteed to be the last diff of
     corresponding Differential Revision, and exist in the repo.
     """
-    url, token = readurltoken(repo)
     unfi = repo.unfiltered()
     nodemap = unfi.changelog.nodemap

@@ -1804,7 +1804,6 @@ def clearrebased(ui, repo, destmap, stat

 def pullrebase(orig, ui, repo, *args, **opts):
     'Call rebase after pull if the latter has been invoked with --rebase'
-    ret = None
     if opts.get(r'rebase'):
         if ui.configbool('commands', 'rebase.requiredest'):
             msg = _('rebase destination required by configuration')
@@ -457,8 +457,6 @@ class mutablebasepack(versionmixin):
         pass

     def writeindex(self):
-        rawindex = ''
-
         largefanout = len(self.entries) > SMALLFANOUTCUTOFF
         if largefanout:
             params = indexparams(LARGEFANOUTPREFIX, self.VERSION)
@@ -250,10 +250,8 @@ class datapack(basepack.basepack):
                 break
             if node > midnode:
                 start = mid
-                startnode = midnode
             elif node < midnode:
                 end = mid
-                endnode = midnode
             else:
                 return None

@@ -175,7 +175,6 @@ def _decompressblob(raw):
     return zlib.decompress(raw)

 def parsefileblob(path, decompress):
-    raw = None
     f = open(path, "rb")
     try:
         raw = f.read()
@@ -259,10 +259,8 @@ class historypack(basepack.basepack):
                 return self._index[mid:mid + entrylen]
             if node > midnode:
                 start = mid
-                startnode = midnode
             elif node < midnode:
                 end = mid
-                endnode = midnode
         return None

     def markledger(self, ledger, options=None):
@@ -514,7 +512,6 @@ class mutablehistorypack(basepack.mutabl

             fileindexentries.append(rawentry)

-        nodecountraw = ''
         nodecountraw = struct.pack('!Q', nodecount)
         return (''.join(fileindexentries) + nodecountraw +
                 ''.join(nodeindexentries))
@@ -61,8 +61,6 @@ class remotefilelog(object):
         return t[s + 2:]

     def add(self, text, meta, transaction, linknode, p1=None, p2=None):
-        hashtext = text
-
         # hash with the metadata, like in vanilla filelogs
         hashtext = shallowutil.createrevlogtext(text, meta.get('copy'),
                                                 meta.get('copyrev'))
@@ -601,7 +601,6 @@ class repacker(object):
                 # TODO: Optimize the deltachain fetching. Since we're
                 # iterating over the different version of the file, we may
                 # be fetching the same deltachain over and over again.
-                meta = None
                 if deltabase != nullid:
                     deltaentry = self.data.getdelta(filename, node)
                     delta, deltabasename, origdeltabase, meta = deltaentry
@@ -938,7 +938,6 @@ class Listener(object):
         self.zeroconf.engine.addReader(self, self.zeroconf.socket)

     def handle_read(self):
-        data = addr = port = None
         sock = self.zeroconf.socket
         try:
             data, (addr, port) = sock.recvfrom(_MAX_MSG_ABSOLUTE)
@@ -1230,7 +1229,6 @@ class ServiceInfo(object):
         delay = _LISTENER_TIME
         next = now + delay
         last = now + timeout
-        result = 0
         try:
             zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY,
                                                    _CLASS_IN))
@@ -306,7 +306,6 @@ def _readactive(repo, marks):
     itself as we commit. This function returns the name of that bookmark.
     It is stored in .hg/bookmarks.current
     """
-    mark = None
     try:
         file = repo.vfs('bookmarks.current')
     except IOError as inst:
@@ -275,7 +275,7 @@ class cg1unpacker(object):
         # because we need to use the top level value (if they exist)
         # in this function.
         srctype = tr.hookargs.setdefault('source', srctype)
-        url = tr.hookargs.setdefault('url', url)
+        tr.hookargs.setdefault('url', url)
         repo.hook('prechangegroup',
                   throw=True, **pycompat.strkwargs(tr.hookargs))

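The changegroup hunk above works because dict.setdefault is useful purely for its side effect: it inserts the default only when the key is missing, and returns the stored value either way. Once nothing consumes that return value, the assignment around the call is dead. A quick sketch with made-up values:

    hookargs = {'source': 'push'}
    hookargs.setdefault('url', 'ssh://example/repo')  # missing: inserts and returns the default
    hookargs.setdefault('source', 'pull')             # present: returns 'push', dict unchanged
    assert hookargs == {'source': 'push', 'url': 'ssh://example/repo'}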
@@ -1251,10 +1251,6 @@ def copy(ui, repo, pats, opts, rename=Fa
         else:
             ui.warn(_('%s: cannot copy - %s\n') %
                     (relsrc, encoding.strtolocal(inst.strerror)))
-            if rename:
-                hint = _("('hg rename --after' to record the rename)\n")
-            else:
-                hint = _("('hg copy --after' to record the copy)\n")
             return True # report a failure

     if ui.verbose or not exact:
@@ -169,7 +169,7 @@ def _terminfosetup(ui, mode, formatted):
             ui._terminfoparams[key[9:]] = newval
     try:
         curses.setupterm()
-    except curses.error as e:
+    except curses.error:
         ui._terminfoparams.clear()
         return

@@ -2633,7 +2633,6 @@ def _abortgraft(ui, repo, graftstate):
         raise error.Abort(_("cannot abort using an old graftstate"))

     # changeset from which graft operation was started
-    startctx = None
     if len(newnodes) > 0:
         startctx = repo[newnodes[0]].p1()
     else:
@@ -5503,7 +5502,6 @@ def summary(ui, repo, **opts):
         pnode = parents[0].node()
         marks = []

-    ms = None
     try:
         ms = mergemod.mergestate.read(repo)
     except error.UnsupportedMergeRecords as e:
@@ -5908,7 +5906,6 @@ def tags(ui, repo, **opts):
     ui.pager('tags')
     fm = ui.formatter('tags', opts)
     hexfunc = fm.hexfunc
-    tagtype = ""

     for t, n in reversed(repo.tagslist()):
         hn = hexfunc(n)
@@ -745,7 +745,6 @@ def debugstate(ui, repo, **opts):
         nodates = True
     datesort = opts.get(r'datesort')

-    timestr = ""
     if datesort:
         keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
     else:
@@ -1182,13 +1181,6 @@ def debuginstall(ui, **opts):
     '''
     opts = pycompat.byteskwargs(opts)

-    def writetemp(contents):
-        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
-        f = os.fdopen(fd, r"wb")
-        f.write(contents)
-        f.close()
-        return name
-
     problems = 0

     fm = ui.formatter('debuginstall', opts)
@@ -2569,7 +2561,6 @@ def debugssl(ui, repo, source=None, **op

     source, branches = hg.parseurl(ui.expandpath(source))
     url = util.url(source)
-    addr = None

     defaultport = {'https': 443, 'ssh': 22}
     if url.scheme in defaultport:
@@ -297,7 +297,6 @@ def getbundlespec(ui, fh):
                                   'client'))
         elif part.type == 'stream2' and version is None:
             # A stream2 part requires to be part of a v2 bundle
-            version = "v2"
             requirements = urlreq.unquote(part.params['requirements'])
             splitted = requirements.split()
             params = bundle2._formatrequirementsparams(splitted)
@@ -462,7 +462,6 @@ def _imergeauto(repo, mynode, orig, fcd,
     Generic driver for _imergelocal and _imergeother
     """
     assert localorother is not None
-    tool, toolpath, binary, symlink, scriptfn = toolconf
     r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
                                 localorother=localorother)
     return True, r
@@ -282,7 +282,7 @@ def unshare(ui, repo):
     called.
     """

-    destlock = lock = None
+    destlock = None
     lock = repo.lock()
     try:
         # we use locks here because if we race with commit, we
@@ -143,7 +143,7 @@ def rawindexentries(ui, repos, req, subd
                 path = path[:-len(discarded) - 1]

             try:
-                r = hg.repository(ui, path)
+                hg.repository(ui, path)
                 directory = False
             except (IOError, error.RepoError):
                 pass
@@ -2539,7 +2539,7 @@ class localrepository(object):
                     m[f] = self._filecommit(fctx, m1, m2, linkrev,
                                             trp, changed)
                     m.setflag(f, fctx.flags())
-                except OSError as inst:
+                except OSError:
                     self.ui.warn(_("trouble committing %s!\n") % f)
                     raise
                 except IOError as inst:
@@ -97,7 +97,6 @@ def saveremotenames(repo, remotepath, br

 def activepath(repo, remote):
     """returns remote path"""
-    local = None
     # is the remote a local peer
     local = remote.local()

@@ -283,7 +283,6 @@ class _lazymanifest(object):
         if len(self.extradata) == 0:
             return
         l = []
-        last_cut = 0
         i = 0
         offset = 0
         self.extrainfo = [0] * len(self.positions)
@@ -1186,9 +1186,6 @@ def manifestmerge(repo, wctx, p2, pa, br

     diff = m1.diff(m2, match=matcher)

-    if matcher is None:
-        matcher = matchmod.always('', '')
-
     actions = {}
     for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
         if n1 and n2: # file exists on both local and remote side
@@ -641,7 +641,6 @@ def formathtml(blocks):

 def parse(text, indent=0, keep=None, admonitions=None):
     """Parse text into a list of blocks"""
-    pruned = []
     blocks = findblocks(text)
     for b in blocks:
         b['indent'] += indent
@@ -736,7 +735,6 @@ def _getsections(blocks):
     '''return a list of (section path, nesting level, blocks) tuples'''
     nest = ""
     names = ()
-    level = 0
     secs = []

     def getname(b):
@@ -1448,7 +1448,6 @@ class binhunk(object):
             hunk.append(l)
             return l.rstrip('\r\n')

-        size = 0
         while True:
             line = getline(lr, self.hunk)
             if not line:
@@ -1903,7 +1902,6 @@ def iterhunks(fp):
             if not gitpatches:
                 raise PatchError(_('failed to synchronize metadata for "%s"')
                                  % afile[2:])
-            gp = gitpatches[-1]
             newfile = True
         elif x.startswith('---'):
             # check for a unified diff
@@ -2353,7 +2351,6 @@ def diffhunks(repo, node1=None, node2=No
         modified = filterrel(modified)
         added = filterrel(added)
         removed = filterrel(removed)
-        relfiltered = True
         # filter out copies where either side isn't inside the relative root
         copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                      if dst.startswith(relroot)
@@ -816,9 +816,6 @@ def write_to_chrome(data, fp, minthresho
         id2stack[-1].update(parent=parent)
         return myid

-    def endswith(a, b):
-        return list(a)[-len(b):] == list(b)
-
     # The sampling profiler can sample multiple times without
     # advancing the clock, potentially causing the Chrome trace viewer
     # to render single-pixel columns that we cannot zoom in on. We
@@ -858,9 +855,6 @@ def write_to_chrome(data, fp, minthresho
     # events given only stack snapshots.

     for sample in data.samples:
-        tos = sample.stack[0]
-        name = tos.function
-        path = simplifypath(tos.path)
         stack = tuple((('%s:%d' % (simplifypath(frame.path), frame.lineno),
                         frame.function) for frame in sample.stack))
         qstack = collections.deque(stack)
@@ -145,7 +145,6 @@ def submerge(repo, wctx, mctx, actx, ove

     promptssrc = filemerge.partextras(labels)
     for s, l in sorted(s1.iteritems()):
-        prompts = None
         a = sa.get(s, nullstate)
         ld = l # local state with possible dirty flag for compares
         if wctx.sub(s).dirty():
@@ -218,7 +217,6 @@ def submerge(repo, wctx, mctx, actx, ove
             wctx.sub(s).remove()

     for s, r in sorted(s2.items()):
-        prompts = None
         if s in s1:
             continue
         elif s not in sa:
@@ -89,7 +89,7 @@ def _playback(journal, report, opener, v
             except (IOError, OSError) as inst:
                 if inst.errno != errno.ENOENT:
                     raise
-        except (IOError, OSError, error.Abort) as inst:
+        except (IOError, OSError, error.Abort):
             if not c:
                 raise

@@ -101,7 +101,7 @@ def _playback(journal, report, opener, v
         for f in backupfiles:
             if opener.exists(f):
                 opener.unlink(f)
-    except (IOError, OSError, error.Abort) as inst:
+    except (IOError, OSError, error.Abort):
        # only pure backup file remains, it is sage to ignore any error
        pass

@@ -566,8 +566,6 @@ class ui(object):
             candidate = self._data(untrusted).get(s, n, None)
             if candidate is not None:
                 value = candidate
-                section = s
-                name = n
                 break

         if self.debugflag and not untrusted and self._reportuntrusted:
@@ -424,8 +424,6 @@ def getbundle(repo, proto, others):
         raise error.Abort(bundle2requiredmain,
                           hint=bundle2requiredhint)

-    prefercompressed = True
-
     try:
         clheads = set(repo.changelog.heads())
         heads = set(opts.get('heads', set()))
@@ -578,7 +576,6 @@ def unbundle(repo, proto, heads):
             repo.ui.debug('redirecting incoming bundle to %s\n' %
                           tempname)
             fp = os.fdopen(fd, pycompat.sysstr('wb+'))
-            r = 0
             for p in payload:
                 fp.write(p)
             fp.seek(0)
@@ -1225,7 +1225,6 @@ class Test(unittest.TestCase):
             killdaemons(env['DAEMON_PIDS'])
             return ret

-        output = b''
         proc.tochild.close()

         try:
@@ -123,7 +123,6 @@ def test_missingancestors(seed, rng):
         # reference slow algorithm
         naiveinc = naiveincrementalmissingancestors(ancs, bases)
         seq = []
-        revs = []
         for _ in xrange(inccount):
             if rng.random() < 0.2:
                 newbases = samplerevs(graphnodes)
@@ -15,7 +15,6 @@ maxdeltab = 10
 def _genedits(seed, endrev):
     lines = []
     random.seed(seed)
-    rev = 0
     for rev in range(0, endrev):
         n = len(lines)
         a1 = random.randint(0, n)
@@ -161,7 +161,7 @@ class histpacktests(unittest.TestCase):
         pack = self.createPack(revisions)

         # Verify the pack contents
-        for (filename, node), (p1, p2, lastnode) in allentries.items():
+        for (filename, node) in allentries:
             ancestors = pack.getancestors(filename, node)
             self.assertEqual(ancestorcounts[(filename, node)],
                              len(ancestors))
@@ -417,7 +417,6 @@ def findsnapshottest(rlog):
     print(' got: %s' % result15)

 def maintest():
-    expected = rl = None
     with newtransaction() as tr:
         rl = newrevlog(recreate=True)
         expected = writecases(rl, tr)
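Deletions of the first kind above can be found mechanically. pyflakes, for instance, reports locals that are assigned but never read; a sketch of a run over a throwaway file (the file name is hypothetical, and the exact output format varies between pyflakes versions):

    # demo.py
    def getsections():
        nest = ""
        level = 0    # assigned but never read
        return nest

    $ python -m pyflakes demo.py
    demo.py:4: local variable 'level' is assigned to but never used

The pre-initializations removed in this commit (variables that are always rebound before use) are not flagged by such tools and have to be verified path by path.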