py3: delete b'' prefix from safehasattr arguments...
Martin von Zweigbergk
r43385:4aa72cdf default
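Rationale for the change: on Python 3, getattr() requires the attribute name to be a native str and raises TypeError for a bytes name even when a default is supplied, so a getattr()-based safehasattr() helper crashes instead of returning False. The sketch below illustrates that behaviour with a stand-in helper; safehasattr_sketch, Demo and _notset are illustrative names, and the implementation is only an approximation of such a helper, not Mercurial's exact code.

# Minimal sketch of a safehasattr()-style helper (illustrative, not Mercurial's code).
_notset = object()  # sentinel that cannot collide with a real attribute value


def safehasattr_sketch(thing, attr):
    """Return True if `thing` has attribute `attr`.

    getattr() with a default only suppresses AttributeError, so unlike
    Python 2's hasattr() this does not hide unrelated exceptions raised
    by properties.
    """
    return getattr(thing, attr, _notset) is not _notset


class Demo(object):
    clock = b'c:0:1'  # made-up value, only for the demo


assert safehasattr_sketch(Demo(), 'clock')        # str attribute name: fine
assert not safehasattr_sketch(Demo(), 'missing')  # absent attribute -> False

try:
    # On Python 3, getattr() rejects a bytes attribute name outright, so the
    # b'' prefixes removed in this changeset were errors, not just style.
    safehasattr_sketch(Demo(), b'clock')
except TypeError as exc:
    print('bytes attribute name rejected:', exc)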
@@ -862,7 +862,7 @@ class fixupstate(object):
         # be slow. in absorb's case, no need to invalidate fsmonitorstate.
         noop = lambda: 0
         restore = noop
-        if util.safehasattr(dirstate, b'_fsmonitorstate'):
+        if util.safehasattr(dirstate, '_fsmonitorstate'):
             bak = dirstate._fsmonitorstate.invalidate

             def restore():
@@ -731,13 +731,13 @@ class cookietransportrequest(object):
 # inheritance with a new-style class.
 class cookietransport(cookietransportrequest, xmlrpclib.Transport):
     def __init__(self, use_datetime=0):
-        if util.safehasattr(xmlrpclib.Transport, b"__init__"):
+        if util.safehasattr(xmlrpclib.Transport, "__init__"):
             xmlrpclib.Transport.__init__(self, use_datetime)


 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
     def __init__(self, use_datetime=0):
-        if util.safehasattr(xmlrpclib.Transport, b"__init__"):
+        if util.safehasattr(xmlrpclib.Transport, "__init__"):
             xmlrpclib.SafeTransport.__init__(self, use_datetime)

@@ -53,7 +53,7 @@ def extsetup(ui):


 def _commit(orig, ui, repo, *pats, **opts):
-    if util.safehasattr(repo, b'unfiltered'):
+    if util.safehasattr(repo, 'unfiltered'):
         repo = repo.unfiltered()

     class repoextra(repo.__class__):
@@ -218,7 +218,7 @@ def fastannotate(ui, repo, *pats, **opts
     paths = list(_matchpaths(repo, rev, pats, opts, aopts))

     # for client, prefetch from the server
-    if util.safehasattr(repo, b'prefetchfastannotate'):
+    if util.safehasattr(repo, 'prefetchfastannotate'):
         repo.prefetchfastannotate(paths)

     for path in paths:
@@ -273,7 +273,7 @@ def _annotatewrapper(orig, ui, repo, *pa

     # check if we need to do prefetch (client-side)
     rev = opts.get(r'rev')
-    if util.safehasattr(repo, b'prefetchfastannotate') and rev is not None:
+    if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
         paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
         repo.prefetchfastannotate(paths)

@@ -320,7 +320,7 @@ def debugbuildannotatecache(ui, repo, *p
     ctx = scmutil.revsingle(repo, rev)
     m = scmutil.match(ctx, pats, opts)
     paths = list(ctx.walk(m))
-    if util.safehasattr(repo, b'prefetchfastannotate'):
+    if util.safehasattr(repo, 'prefetchfastannotate'):
         # client
         if opts.get(b'REV'):
             raise error.Abort(_(b'--rev cannot be used for client'))
@@ -61,7 +61,7 @@ class client(object):

     def getcurrentclock(self):
         result = self.command(b'clock')
-        if not util.safehasattr(result, b'clock'):
+        if not util.safehasattr(result, 'clock'):
             raise Unavailable(
                 b'clock result is missing clock value', invalidate=True
             )
@@ -102,7 +102,7 @@ def _setupdirstate(repo, dirstate):
 def wrapdirstate(orig, repo):
     """Make journal storage available to the dirstate object"""
     dirstate = orig(repo)
-    if util.safehasattr(repo, b'journal'):
+    if util.safehasattr(repo, 'journal'):
         _setupdirstate(repo, dirstate)
     return dirstate

@@ -111,7 +111,7 @@ def recorddirstateparents(dirstate, old,
     """Records all dirstate parent changes in the journal."""
     old = list(old)
     new = list(new)
-    if util.safehasattr(dirstate, b'journalstorage'):
+    if util.safehasattr(dirstate, 'journalstorage'):
         # only record two hashes if there was a merge
         oldhashes = old[:1] if old[1] == node.nullid else old
         newhashes = new[:1] if new[1] == node.nullid else new
@@ -124,7 +124,7 @@ def recorddirstateparents(dirstate, old,
 def recordbookmarks(orig, store, fp):
     """Records all bookmark changes in the journal."""
     repo = store._repo
-    if util.safehasattr(repo, b'journal'):
+    if util.safehasattr(repo, 'journal'):
         oldmarks = bookmarks.bmstore(repo)
         for mark, value in pycompat.iteritems(store):
             oldvalue = oldmarks.get(mark, node.nullid)
@@ -186,7 +186,7 @@ def unsharejournal(orig, ui, repo, repop
     if (
         repo.path == repopath
         and repo.shared()
-        and util.safehasattr(repo, b'journal')
+        and util.safehasattr(repo, 'journal')
     ):
         sharedrepo = hg.sharedreposource(repo)
         sharedfeatures = _readsharedfeatures(repo)
@@ -45,7 +45,7 @@ def handlewsgirequest(orig, rctx, req, r
     if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'):
         return False

-    if not util.safehasattr(rctx.repo.svfs, b'lfslocalblobstore'):
+    if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'):
         return False

     if not req.dispatchpath:
@@ -266,7 +266,7 @@ def _handlechangespec(op, inpart):
     # will currently always be there when using the core+narrowhg server, but
     # other servers may include a changespec part even when not widening (e.g.
     # because we're deepening a shallow repo).
-    if util.safehasattr(repo, b'setnewnarrowpats'):
+    if util.safehasattr(repo, 'setnewnarrowpats'):
         repo.setnewnarrowpats()

@@ -348,9 +348,9 @@ def setup():

     def wrappedcghandler(op, inpart):
         origcghandler(op, inpart)
-        if util.safehasattr(op, b'_widen_bundle'):
+        if util.safehasattr(op, '_widen_bundle'):
             handlechangegroup_widen(op, inpart)
-        if util.safehasattr(op, b'_bookmarksbackup'):
+        if util.safehasattr(op, '_bookmarksbackup'):
             localrepo.localrepository._bookmarks.set(
                 op.repo, op._bookmarksbackup
             )
@@ -47,12 +47,12 @@ class connectionpool(object):
                 # close pipee first so peer.cleanup reading it won't deadlock,
                 # if there are other processes with pipeo open (i.e. us).
                 peer = orig.im_self
-                if util.safehasattr(peer, b'pipee'):
+                if util.safehasattr(peer, 'pipee'):
                     peer.pipee.close()
                 return orig()

             peer = hg.peer(self._repo.ui, {}, path)
-            if util.safehasattr(peer, b'cleanup'):
+            if util.safehasattr(peer, 'cleanup'):
                 extensions.wrapfunction(peer, b'cleanup', _cleanup)

             conn = connection(pathpool, peer)
@@ -84,5 +84,5 @@ class connection(object):
         self.close()

     def close(self):
-        if util.safehasattr(self.peer, b'cleanup'):
+        if util.safehasattr(self.peer, 'cleanup'):
             self.peer.cleanup()
@@ -89,7 +89,7 @@ def peersetup(ui, peer):
                 not in self.capabilities()
             ):
                 return
-            if not util.safehasattr(self, b'_localrepo'):
+            if not util.safehasattr(self, '_localrepo'):
                 return
             if (
                 constants.SHALLOWREPO_REQUIREMENT
@@ -129,7 +129,7 @@ def peersetup(ui, peer):

         def _callstream(self, command, **opts):
             supertype = super(remotefilepeer, self)
-            if not util.safehasattr(supertype, b'_sendrequest'):
+            if not util.safehasattr(supertype, '_sendrequest'):
                 self._updatecallstreamopts(command, pycompat.byteskwargs(opts))
             return super(remotefilepeer, self)._callstream(command, **opts)

@@ -54,7 +54,7 @@ def backgroundrepack(
 def fullrepack(repo, options=None):
     """If ``packsonly`` is True, stores creating only loose objects are skipped.
     """
-    if util.safehasattr(repo, b'shareddatastores'):
+    if util.safehasattr(repo, 'shareddatastores'):
         datasource = contentstore.unioncontentstore(*repo.shareddatastores)
         historysource = metadatastore.unionmetadatastore(
             *repo.sharedhistorystores, allowincomplete=True
@@ -72,7 +72,7 @@ def fullrepack(repo, options=None):
             options=options,
         )

-    if util.safehasattr(repo.manifestlog, b'datastore'):
+    if util.safehasattr(repo.manifestlog, 'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
         lpackpath, ldstores, lhstores = localdata
         spackpath, sdstores, shstores = shareddata
@@ -112,7 +112,7 @@ def incrementalrepack(repo, options=None
     """This repacks the repo by looking at the distribution of pack files in the
     repo and performing the most minimal repack to keep the repo in good shape.
     """
-    if util.safehasattr(repo, b'shareddatastores'):
+    if util.safehasattr(repo, 'shareddatastores'):
         packpath = shallowutil.getcachepackpath(
             repo, constants.FILEPACK_CATEGORY
         )
@@ -125,7 +125,7 @@ def incrementalrepack(repo, options=None
             options=options,
         )

-    if util.safehasattr(repo.manifestlog, b'datastore'):
+    if util.safehasattr(repo.manifestlog, 'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
         lpackpath, ldstores, lhstores = localdata
         spackpath, sdstores, shstores = shareddata
@@ -901,7 +901,7 @@ class repackentry(object):


 def repacklockvfs(repo):
-    if util.safehasattr(repo, b'name'):
+    if util.safehasattr(repo, 'name'):
         # Lock in the shared cache so repacks across multiple copies of the same
         # repo are coordinated.
         sharedcachepath = shallowutil.getcachepackpath(
@@ -345,7 +345,7 @@ def wraprepo(repo):
     repo.excludepattern = repo.ui.configlist(
         b"remotefilelog", b"excludepattern", None
     )
-    if not util.safehasattr(repo, b'connectionpool'):
+    if not util.safehasattr(repo, 'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)

     if repo.includepattern or repo.excludepattern:
@@ -936,7 +936,7 @@ class unbundle20(unpackermixin):

     def close(self):
         """close underlying file"""
-        if util.safehasattr(self._fp, b'close'):
+        if util.safehasattr(self._fp, 'close'):
             return self._fp.close()


@@ -1024,7 +1024,7 @@ class bundlepart(object):

         The new part have the very same content but no partid assigned yet.
         Parts with generated data cannot be copied."""
-        assert not util.safehasattr(self.data, b'next')
+        assert not util.safehasattr(self.data, 'next')
         return self.__class__(
             self.type,
             self._mandatoryparams,
@@ -1093,7 +1093,7 @@ class bundlepart(object):
             msg.append(b')')
         if not self.data:
             msg.append(b' empty payload')
-        elif util.safehasattr(self.data, b'next') or util.safehasattr(
+        elif util.safehasattr(self.data, 'next') or util.safehasattr(
             self.data, b'__next__'
         ):
             msg.append(b' streamed payload')
@@ -1189,7 +1189,7 @@ class bundlepart(object):
         Exists to handle the different methods to provide data to a part."""
         # we only support fixed size data now.
         # This will be improved in the future.
-        if util.safehasattr(self.data, b'next') or util.safehasattr(
+        if util.safehasattr(self.data, 'next') or util.safehasattr(
            self.data, b'__next__'
        ):
            buff = util.chunkbuffer(self.data)
@@ -1336,7 +1336,7 @@ class unbundlepart(unpackermixin):

     def __init__(self, ui, header, fp):
         super(unbundlepart, self).__init__(fp)
-        self._seekable = util.safehasattr(fp, b'seek') and util.safehasattr(
+        self._seekable = util.safehasattr(fp, 'seek') and util.safehasattr(
             fp, b'tell'
         )
         self.ui = ui
@@ -212,7 +212,7 @@ class bundlepeer(localrepo.localpeer):
 class bundlephasecache(phases.phasecache):
     def __init__(self, *args, **kwargs):
         super(bundlephasecache, self).__init__(*args, **kwargs)
-        if util.safehasattr(self, b'opener'):
+        if util.safehasattr(self, 'opener'):
             self.opener = vfsmod.readonlyvfs(self.opener)

     def write(self):
@@ -159,7 +159,7 @@ def _flipbit(v, node):
 def ctxpvec(ctx):
     '''construct a pvec for ctx while filling in the cache'''
     r = ctx.repo()
-    if not util.safehasattr(r, b"_pveccache"):
+    if not util.safehasattr(r, "_pveccache"):
         r._pveccache = {}
     pvc = r._pveccache
     if ctx.rev() not in pvc:
@@ -65,7 +65,7 @@ class _funcregistrarbase(object):
             msg = b'duplicate registration for name: "%s"' % name
             raise error.ProgrammingError(msg)

-        if func.__doc__ and not util.safehasattr(func, b'_origdoc'):
+        if func.__doc__ and not util.safehasattr(func, '_origdoc'):
             func._origdoc = func.__doc__.strip()
             doc = pycompat.sysbytes(func._origdoc)
             func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
@@ -259,8 +259,8 @@ def mainfrozen():
     (portable, not much used).
     """
     return (
-        pycompat.safehasattr(sys, b"frozen")
-        or pycompat.safehasattr(sys, b"importers")  # new py2exe
+        pycompat.safehasattr(sys, "frozen")
+        or pycompat.safehasattr(sys, "importers")  # new py2exe
         or imp.is_frozen(r"__main__")  # old py2exe
     )  # tools/freeze
