@@ -40,7 +40,6 b' from mercurial import ('
     extensions,
     hg,
     ui as uimod,
-    util,
 )
 
 basedir = os.path.abspath(
@@ -66,7 +65,7 b' def runperfcommand(reponame, command, *a'
     os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
     # for "historical portability"
     # ui.load() has been available since d83ca85
-    if util.safehasattr(uimod.ui, "load"):
+    if hasattr(uimod.ui, "load"):
         ui = uimod.ui.load()
     else:
         ui = uimod.ui()
@@ -873,7 +873,7 b' class fixupstate:'
         # be slow. in absorb's case, no need to invalidate fsmonitorstate.
         noop = lambda: 0
         restore = noop
-        if util.safehasattr(dirstate, '_fsmonitorstate'):
+        if hasattr(dirstate, '_fsmonitorstate'):
             bak = dirstate._fsmonitorstate.invalidate
 
             def restore():
@@ -766,13 +766,13 b' class cookietransportrequest:'
 # inheritance with a new-style class.
 class cookietransport(cookietransportrequest, xmlrpclib.Transport):
     def __init__(self, use_datetime=0):
-        if util.safehasattr(xmlrpclib.Transport, "__init__"):
+        if hasattr(xmlrpclib.Transport, "__init__"):
             xmlrpclib.Transport.__init__(self, use_datetime)
 
 
 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
     def __init__(self, use_datetime=0):
-        if util.safehasattr(xmlrpclib.Transport, "__init__"):
+        if hasattr(xmlrpclib.Transport, "__init__"):
             xmlrpclib.SafeTransport.__init__(self, use_datetime)
 
 
@@ -987,7 +987,7 b' def reposetup(ui, repo):'
         @localrepo.unfilteredmethod
         def clonebundles_lock(self, wait=True):
             '''Lock the repository file related to clone bundles'''
-            if not util.safehasattr(self, '_cb_lock_ref'):
+            if not hasattr(self, '_cb_lock_ref'):
                 self._cb_lock_ref = None
             l = self._currentlock(self._cb_lock_ref)
             if l is not None:
@@ -16,7 +16,6 b' from mercurial import ('
     error,
     extensions,
     registrar,
-    util,
 )
 
 cmdtable = {}
@@ -52,7 +51,7 b' def extsetup(ui):'
 
 
 def _commit(orig, ui, repo, *pats, **opts):
-    if util.safehasattr(repo, 'unfiltered'):
+    if hasattr(repo, 'unfiltered'):
         repo = repo.unfiltered()
 
     class repoextra(repo.__class__):
@@ -198,9 +198,9 b' def createlog(ui, directory=None, root=b'
             oldlog = pickle.load(open(cachefile, b'rb'))
             for e in oldlog:
                 if not (
-                    util.safehasattr(e, b'branchpoints')
-                    and util.safehasattr(e, b'commitid')
-                    and util.safehasattr(e, b'mergepoint')
+                    hasattr(e, b'branchpoints')
+                    and hasattr(e, b'commitid')
+                    and hasattr(e, b'mergepoint')
                 ):
                     ui.status(_(b'ignoring old cache\n'))
                     oldlog = []
@@ -28,7 +28,6 b' Pool = svn.core.Pool'
 SubversionException = svn.core.SubversionException
 
 from mercurial.pycompat import getattr
-from mercurial import util
 
 # Some older versions of the Python bindings need to be
 # explicitly initialized. But what we want to do probably
@@ -63,7 +62,7 b' def _create_auth_baton(pool):'
         if p:
             providers.append(p)
     else:
-        if util.safehasattr(svn.client, 'get_windows_simple_provider'):
+        if hasattr(svn.client, 'get_windows_simple_provider'):
             providers.append(svn.client.get_windows_simple_provider(pool))
 
     return svn.core.svn_auth_open(providers, pool)
@@ -85,7 +84,7 b' class SvnRaTransport:'
         self.password = b''
 
         # Only Subversion 1.4 has reparent()
-        if ra is None or not util.safehasattr(svn.ra, 'reparent'):
+        if ra is None or not hasattr(svn.ra, 'reparent'):
             self.client = svn.client.create_context(self.pool)
             ab = _create_auth_baton(self.pool)
             self.client.auth_baton = ab
@@ -19,7 +19,6 b' from mercurial import ('
     pycompat,
     registrar,
     scmutil,
-    util,
 )
 
 from . import (
@@ -218,7 +217,7 b' def fastannotate(ui, repo, *pats, **opts'
     paths = list(_matchpaths(repo, rev, pats, opts, aopts))
 
     # for client, prefetch from the server
-    if util.safehasattr(repo, 'prefetchfastannotate'):
+    if hasattr(repo, 'prefetchfastannotate'):
         repo.prefetchfastannotate(paths)
 
     for path in paths:
@@ -273,7 +272,7 b' def _annotatewrapper(orig, ui, repo, *pa'
 
     # check if we need to do prefetch (client-side)
     rev = opts.get('rev')
-    if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
+    if hasattr(repo, 'prefetchfastannotate') and rev is not None:
         paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
         repo.prefetchfastannotate(paths)
 
@@ -320,7 +319,7 b' def debugbuildannotatecache(ui, repo, *p'
     ctx = logcmdutil.revsingle(repo, rev)
     m = scmutil.match(ctx, pats, opts)
     paths = list(ctx.walk(m))
-    if util.safehasattr(repo, 'prefetchfastannotate'):
+    if hasattr(repo, 'prefetchfastannotate'):
         # client
         if opts.get(b'REV'):
             raise error.Abort(_(b'--rev cannot be used for client'))
@@ -324,7 +324,7 b' class _annotatecontext:'
                 b'(resolved fctx: %s)\n'
                 % (
                     self.path,
-                    stringutil.pprint(util.safehasattr(revfctx, 'node')),
+                    stringutil.pprint(hasattr(revfctx, 'node')),
                 )
             )
         return self.annotatedirectly(revfctx, showpath, showlines)
@@ -332,7 +332,7 b' def overridewalk(orig, self, match, subr'
     matchfn = match.matchfn
     matchalways = match.always()
     dmap = self._map
-    if util.safehasattr(dmap, b'_map'):
+    if hasattr(dmap, b'_map'):
         # for better performance, directly access the inner dirstate map if the
         # standard dirstate implementation is in use.
         dmap = dmap._map
@@ -744,7 +744,7 b' def makedirstate(repo, dirstate):'
 def wrapdirstate(orig, self):
     ds = orig(self)
     # only override the dirstate when Watchman is available for the repo
-    if util.safehasattr(self, b'_fsmonitorstate'):
+    if hasattr(self, b'_fsmonitorstate'):
         makedirstate(self, ds)
     return ds
 
@@ -811,7 +811,7 b' class state_update:'
             self.oldnode = self.repo[b'.'].node()
 
         if self.repo.currentwlock() is None:
-            if util.safehasattr(self.repo, b'wlocknostateupdate'):
+            if hasattr(self.repo, b'wlocknostateupdate'):
                 self._lock = self.repo.wlocknostateupdate()
             else:
                 self._lock = self.repo.wlock()
@@ -839,7 +839,7 b' class state_update:'
             self._lock.release()
 
     def _state(self, cmd, commithash, status=b'ok'):
-        if not util.safehasattr(self.repo, b'_watchmanclient'):
+        if not hasattr(self.repo, b'_watchmanclient'):
             return False
         try:
             self.repo._watchmanclient.command(
@@ -69,7 +69,7 b' class client:'
 
     def getcurrentclock(self):
         result = self.command(b'clock')
-        if not util.safehasattr(result, 'clock'):
+        if not hasattr(result, 'clock'):
             raise Unavailable(
                 b'clock result is missing clock value', invalidate=True
             )
@@ -103,7 +103,7 b' def _setupdirstate(repo, dirstate):'
 def wrapdirstate(orig, repo):
     """Make journal storage available to the dirstate object"""
     dirstate = orig(repo)
-    if util.safehasattr(repo, 'journal'):
+    if hasattr(repo, 'journal'):
         _setupdirstate(repo, dirstate)
     return dirstate
 
@@ -112,7 +112,7 b' def recorddirstateparents(dirstate, old,'
     """Records all dirstate parent changes in the journal."""
     old = list(old)
     new = list(new)
-    if util.safehasattr(dirstate, 'journalstorage'):
+    if hasattr(dirstate, 'journalstorage'):
         # only record two hashes if there was a merge
         oldhashes = old[:1] if old[1] == dirstate._nodeconstants.nullid else old
         newhashes = new[:1] if new[1] == dirstate._nodeconstants.nullid else new
@@ -125,7 +125,7 b' def recorddirstateparents(dirstate, old,'
 def recordbookmarks(orig, store, fp):
     """Records all bookmark changes in the journal."""
     repo = store._repo
-    if util.safehasattr(repo, 'journal'):
+    if hasattr(repo, 'journal'):
         oldmarks = bookmarks.bmstore(repo)
         all_marks = set(b for b, n in oldmarks.items())
         all_marks.update(b for b, n in store.items())
@@ -185,11 +185,7 b' def wrappostshare(orig, sourcerepo, dest'
 
 def unsharejournal(orig, ui, repo, repopath):
     """Copy shared journal entries into this repo when unsharing"""
-    if (
-        repo.path == repopath
-        and repo.shared()
-        and util.safehasattr(repo, 'journal')
-    ):
+    if repo.path == repopath and repo.shared() and hasattr(repo, 'journal'):
         sharedrepo = hg.sharedreposource(repo)
         sharedfeatures = _readsharedfeatures(repo)
         if sharedrepo and sharedfeatures > {b'journal'}:
@@ -814,7 +814,7 b' def getstatuswriter(ui, repo, forcibly=N'
     Otherwise, this returns the function to always write out (or
     ignore if ``not forcibly``) status.
     """
-    if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
+    if forcibly is None and hasattr(repo, '_largefilesenabled'):
         return repo._lfstatuswriters[-1]
     else:
         if forcibly:
@@ -1167,7 +1167,7 b' def hgclone(orig, ui, opts, *args, **kwa'
 
 @eh.wrapcommand(b'rebase', extension=b'rebase')
 def overriderebasecmd(orig, ui, repo, **opts):
-    if not util.safehasattr(repo, '_largefilesenabled'):
+    if not hasattr(repo, '_largefilesenabled'):
         return orig(ui, repo, **opts)
 
     resuming = opts.get('continue')
@@ -1298,7 +1298,7 b' def overridearchive('
         # allow only hgsubrepos to set this, instead of the current scheme
         # where the parent sets this for the child.
         with (
-            util.safehasattr(sub, '_repo')
+            hasattr(sub, '_repo')
             and lfstatus(sub._repo)
             or util.nullcontextmanager()
         ):
@@ -1309,7 +1309,7 b' def overridearchive('
 
 @eh.wrapfunction(subrepo.hgsubrepo, 'archive')
 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
-    lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
+    lfenabled = hasattr(repo._repo, '_largefilesenabled')
     if not lfenabled or not repo._repo.lfstatus:
         return orig(repo, archiver, prefix, match, decode)
 
@@ -1364,7 +1364,7 b' def hgsubrepoarchive(orig, repo, archive'
         # would allow only hgsubrepos to set this, instead of the current scheme
         # where the parent sets this for the child.
         with (
-            util.safehasattr(sub, '_repo')
+            hasattr(sub, '_repo')
             and lfstatus(sub._repo)
             or util.nullcontextmanager()
         ):
@@ -57,7 +57,7 b' def openstore(repo=None, remote=None, pu'
 
     # The path could be a scheme so use Mercurial's normal functionality
     # to resolve the scheme to a repository and use its path
-    path = util.safehasattr(remote, 'url') and remote.url() or remote.path
+    path = hasattr(remote, 'url') and remote.url() or remote.path
 
     match = _scheme_re.match(path)
     if not match:  # regular filesystem path
@@ -271,7 +271,7 b' def _urlerrorreason(urlerror):'
     if isinstance(urlerror.reason, Exception):
         inst = urlerror.reason
 
-    if util.safehasattr(inst, 'reason'):
+    if hasattr(inst, 'reason'):
         try:  # usually it is in the form (errno, strerror)
             reason = inst.reason.args[1]
         except (AttributeError, IndexError):
@@ -751,7 +751,7 b' def remote(repo, remote=None):'
     if lfsurl is None:
         if remote:
             path = remote
-        elif util.safehasattr(repo, '_subtoppath'):
+        elif hasattr(repo, '_subtoppath'):
             # The pull command sets this during the optional update phase, which
             # tells exactly where the pull originated, whether 'paths.default'
             # or explicit.
@@ -16,7 +16,6 b' from mercurial.hgweb import common as hg'
 from mercurial import (
     exthelper,
     pycompat,
-    util,
     wireprotoserver,
 )
 
@@ -44,7 +43,7 b' def handlewsgirequest(orig, rctx, req, r'
     if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'):
         return False
 
-    if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'):
+    if not hasattr(rctx.repo.svfs, 'lfslocalblobstore'):
         return False
 
     if not req.dispatchpath:
@@ -26,7 +26,6 b' from mercurial import ('
     localrepo,
     revlog,
     scmutil,
-    util,
     vfs as vfsmod,
     wireprotov1server,
 )
@@ -72,7 +71,7 b' def allsupportedversions(orig, ui):'
 def _capabilities(orig, repo, proto):
     '''Wrap server command to announce lfs server capability'''
     caps = orig(repo, proto)
-    if util.safehasattr(repo.svfs, 'lfslocalblobstore'):
+    if hasattr(repo.svfs, 'lfslocalblobstore'):
         # Advertise a slightly different capability when lfs is *required*, so
         # that the client knows it MUST load the extension. If lfs is not
         # required on the server, there's no reason to autoload the extension
@@ -335,14 +334,14 b' def vfsinit(orig, self, othervfs):'
     # also copy lfs blobstores. note: this can run before reposetup, so lfs
     # blobstore attributes are not always ready at this time.
     for name in ['lfslocalblobstore', 'lfsremoteblobstore']:
-        if util.safehasattr(othervfs, name):
+        if hasattr(othervfs, name):
             setattr(self, name, getattr(othervfs, name))
 
 
 def _prefetchfiles(repo, revmatches):
     """Ensure that required LFS blobs are present, fetching them as a group if
     needed."""
-    if not util.safehasattr(repo.svfs, 'lfslocalblobstore'):
+    if not hasattr(repo.svfs, 'lfslocalblobstore'):
         return
 
     pointers = []
@@ -366,7 +365,7 b' def _prefetchfiles(repo, revmatches):'
 
 def _canskipupload(repo):
     # Skip if this hasn't been passed to reposetup()
-    if not util.safehasattr(repo.svfs, 'lfsremoteblobstore'):
+    if not hasattr(repo.svfs, 'lfsremoteblobstore'):
         return True
 
     # if remotestore is a null store, upload is a no-op and can be skipped
@@ -375,7 +374,7 b' def _canskipupload(repo):'
 
 def candownload(repo):
     # Skip if this hasn't been passed to reposetup()
-    if not util.safehasattr(repo.svfs, 'lfsremoteblobstore'):
+    if not hasattr(repo.svfs, 'lfsremoteblobstore'):
         return False
 
     # if remotestore is a null store, downloads will lead to nothing
@@ -524,7 +523,7 b' def upgradefinishdatamigration(orig, ui,'
     orig(ui, srcrepo, dstrepo, requirements)
 
     # Skip if this hasn't been passed to reposetup()
-    if util.safehasattr(srcrepo.svfs, 'lfslocalblobstore') and util.safehasattr(
+    if hasattr(srcrepo.svfs, 'lfslocalblobstore') and hasattr(
         dstrepo.svfs, 'lfslocalblobstore'
     ):
         srclfsvfs = srcrepo.svfs.lfslocalblobstore.vfs
@@ -4186,7 +4186,7 b' def reposetup(ui, repo):'
 
 
 def mqimport(orig, ui, repo, *args, **kwargs):
-    if util.safehasattr(repo, 'abortifwdirpatched') and not kwargs.get(
+    if hasattr(repo, 'abortifwdirpatched') and not kwargs.get(
         'no_commit', False
     ):
         repo.abortifwdirpatched(
@@ -259,7 +259,7 b' def _handlechangespec(op, inpart):'
     # will currently always be there when using the core+narrowhg server, but
     # other servers may include a changespec part even when not widening (e.g.
     # because we're deepening a shallow repo).
-    if util.safehasattr(repo, 'setnewnarrowpats'):
+    if hasattr(repo, 'setnewnarrowpats'):
         op.gettransaction()
         repo.setnewnarrowpats()
 
@@ -333,9 +333,9 b' def setup():'
 
     def wrappedcghandler(op, inpart):
         origcghandler(op, inpart)
-        if util.safehasattr(op, '_widen_bundle'):
+        if hasattr(op, '_widen_bundle'):
             handlechangegroup_widen(op, inpart)
-        if util.safehasattr(op, '_bookmarksbackup'):
+        if hasattr(op, '_bookmarksbackup'):
             localrepo.localrepository._bookmarks.set(
                 op.repo, op._bookmarksbackup
             )
@@ -60,9 +60,7 b' def relink(ui, repo, origin=None, **opts'
     command is running. (Both repositories will be locked against
     writes.)
     """
-    if not util.safehasattr(util, 'samefile') or not util.safehasattr(
-        util, 'samedevice'
-    ):
+    if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
         raise error.Abort(_(b'hardlinks are not supported on this system'))
 
     if origin is None and b'default-relink' in ui.paths:
@@ -425,7 +425,7 b' def cloneshallow(orig, ui, repo, *args, '
     finally:
         if opts.get('shallow'):
             for r in repos:
-                if util.safehasattr(r, 'fileservice'):
+                if hasattr(r, 'fileservice'):
                     r.fileservice.close()
 
 
@@ -904,7 +904,7 b' def gcclient(ui, cachepath):'
         if not isenabled(repo):
             continue
 
-        if not util.safehasattr(repo, 'name'):
+        if not hasattr(repo, 'name'):
             ui.warn(
                 _(b"repo %s is a misconfigured remotefilelog repo\n") % path
             )
@@ -1034,7 +1034,7 b' def wcpprefetch(ui, repo, **kwargs):'
     bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)
 
     def anon(unused_success):
-        if util.safehasattr(repo, 'ranprefetch') and repo.ranprefetch:
+        if hasattr(repo, 'ranprefetch') and repo.ranprefetch:
             return
         repo.ranprefetch = True
         repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)
@@ -1080,9 +1080,9 b' def exchangepull(orig, repo, remote, *ar'
             source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
         )
 
-    if util.safehasattr(remote, '_callstream'):
+    if hasattr(remote, '_callstream'):
         remote._localrepo = repo
-    elif util.safehasattr(remote, 'getbundle'):
+    elif hasattr(remote, 'getbundle'):
         extensions.wrapfunction(remote, 'getbundle', localgetbundle)
 
     return orig(repo, remote, *args, **kwargs)
@@ -415,7 +415,7 b' class baseunionstore:'
 
     def markforrefresh(self):
         for store in self.stores:
-            if util.safehasattr(store, b'markforrefresh'):
+            if hasattr(store, b'markforrefresh'):
                 store.markforrefresh()
 
     @staticmethod
@@ -9,7 +9,6 b''
 from mercurial import (
     hg,
     sshpeer,
-    util,
 )
 
 _sshv1peer = sshpeer.sshv1peer
@@ -41,14 +40,14 b' class connectionpool:'
         if conn is None:
 
             peer = hg.peer(self._repo.ui, {}, path)
-            if util.safehasattr(peer, '_cleanup'):
+            if hasattr(peer, '_cleanup'):
 
                 class mypeer(peer.__class__):
                     def _cleanup(self, warn=None):
                         # close pipee first so peer.cleanup reading it won't
                         # deadlock, if there are other processes with pipeo
                         # open (i.e. us).
-                        if util.safehasattr(self, 'pipee'):
+                        if hasattr(self, 'pipee'):
                             self.pipee.close()
                         return super(mypeer, self)._cleanup()
 
@@ -83,5 +82,5 b' class connection:'
         self.close()
 
     def close(self):
-        if util.safehasattr(self.peer, 'cleanup'):
+        if hasattr(self.peer, 'cleanup'):
             self.peer.cleanup()
@@ -92,7 +92,7 b' def peersetup(ui, peer):'
                 not in self.capabilities()
             ):
                 return
-            if not util.safehasattr(self, '_localrepo'):
+            if not hasattr(self, '_localrepo'):
                 return
             if (
                 constants.SHALLOWREPO_REQUIREMENT
@@ -132,7 +132,7 b' def peersetup(ui, peer):'
 
         def _callstream(self, command, **opts):
             supertype = super(remotefilepeer, self)
-            if not util.safehasattr(supertype, '_sendrequest'):
+            if not hasattr(supertype, '_sendrequest'):
                 self._updatecallstreamopts(command, pycompat.byteskwargs(opts))
             return super(remotefilepeer, self)._callstream(command, **opts)
 
@@ -641,9 +641,7 b' class fileserverclient:'
             self._lfsprefetch(fileids)
 
     def _lfsprefetch(self, fileids):
-        if not _lfsmod or not util.safehasattr(
-            self.repo.svfs, b'lfslocalblobstore'
-        ):
+        if not _lfsmod or not hasattr(self.repo.svfs, b'lfslocalblobstore'):
             return
         if not _lfsmod.wrapper.candownload(self.repo):
             return
@@ -228,7 +228,7 b' def onetimesetup(ui):'
         # When generating file blobs, taking the real path is too slow on large
         # repos, so force it to just return the linkrev directly.
         repo = self._repo
-        if util.safehasattr(repo, 'forcelinkrev') and repo.forcelinkrev:
+        if hasattr(repo, 'forcelinkrev') and repo.forcelinkrev:
             return self._filelog.linkrev(self._filelog.rev(self._filenode))
         return orig(self, *args, **kwargs)
 
@@ -49,7 +49,7 b' def backgroundrepack(repo, incremental=T'
 
 def fullrepack(repo, options=None):
     """If ``packsonly`` is True, stores creating only loose objects are skipped."""
-    if util.safehasattr(repo, 'shareddatastores'):
+    if hasattr(repo, 'shareddatastores'):
         datasource = contentstore.unioncontentstore(*repo.shareddatastores)
         historysource = metadatastore.unionmetadatastore(
             *repo.sharedhistorystores, allowincomplete=True
@@ -67,7 +67,7 b' def fullrepack(repo, options=None):'
         options=options,
     )
 
-    if util.safehasattr(repo.manifestlog, 'datastore'):
+    if hasattr(repo.manifestlog, 'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
         lpackpath, ldstores, lhstores = localdata
         spackpath, sdstores, shstores = shareddata
@@ -107,7 +107,7 b' def incrementalrepack(repo, options=None'
     """This repacks the repo by looking at the distribution of pack files in the
     repo and performing the most minimal repack to keep the repo in good shape.
     """
-    if util.safehasattr(repo, 'shareddatastores'):
+    if hasattr(repo, 'shareddatastores'):
         packpath = shallowutil.getcachepackpath(
             repo, constants.FILEPACK_CATEGORY
         )
@@ -120,7 +120,7 b' def incrementalrepack(repo, options=None'
         options=options,
     )
 
-    if util.safehasattr(repo.manifestlog, 'datastore'):
+    if hasattr(repo.manifestlog, 'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
         lpackpath, ldstores, lhstores = localdata
         spackpath, sdstores, shstores = shareddata
@@ -895,7 +895,7 b' class repackentry:'
 
 
 def repacklockvfs(repo):
-    if util.safehasattr(repo, 'name'):
+    if hasattr(repo, 'name'):
         # Lock in the shared cache so repacks across multiple copies of the same
         # repo are coordinated.
         sharedcachepath = shallowutil.getcachepackpath(
@@ -340,7 +340,7 b' def wraprepo(repo):'
     repo.excludepattern = repo.ui.configlist(
         b"remotefilelog", b"excludepattern", None
     )
-    if not util.safehasattr(repo, 'connectionpool'):
+    if not hasattr(repo, 'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)
 
     if repo.includepattern or repo.excludepattern:
@@ -980,7 +980,7 b' class unbundle20(unpackermixin):'
 
     def close(self):
         """close underlying file"""
-        if util.safehasattr(self._fp, 'close'):
+        if hasattr(self._fp, 'close'):
             return self._fp.close()
 
 
@@ -1068,7 +1068,7 b' class bundlepart:'
 
         The new part have the very same content but no partid assigned yet.
         Parts with generated data cannot be copied."""
-        assert not util.safehasattr(self.data, 'next')
+        assert not hasattr(self.data, 'next')
        return self.__class__(
            self.type,
            self._mandatoryparams,
@@ -1137,9 +1137,7 b' class bundlepart:'
             msg.append(b')')
         if not self.data:
             msg.append(b' empty payload')
-        elif util.safehasattr(self.data, 'next') or util.safehasattr(
-            self.data, '__next__'
-        ):
+        elif hasattr(self.data, 'next') or hasattr(self.data, '__next__'):
             msg.append(b' streamed payload')
         else:
             msg.append(b' %i bytes payload' % len(self.data))
@@ -1233,9 +1231,7 b' class bundlepart:'
         Exists to handle the different methods to provide data to a part."""
         # we only support fixed size data now.
         # This will be improved in the future.
-        if util.safehasattr(self.data, 'next') or util.safehasattr(
-            self.data, '__next__'
-        ):
+        if hasattr(self.data, 'next') or hasattr(self.data, '__next__'):
             buff = util.chunkbuffer(self.data)
             chunk = buff.read(preferedchunksize)
             while chunk:
@@ -1380,9 +1376,7 b' class unbundlepart(unpackermixin):'
 
     def __init__(self, ui, header, fp):
         super(unbundlepart, self).__init__(fp)
-        self._seekable = util.safehasattr(fp, 'seek') and util.safehasattr(
-            fp, 'tell'
-        )
+        self._seekable = hasattr(fp, 'seek') and hasattr(fp, 'tell')
         self.ui = ui
         # unbundle state attr
         self._headerdata = header
@@ -245,7 +245,7 b' class bundlepeer(localrepo.localpeer):'
 class bundlephasecache(phases.phasecache):
     def __init__(self, *args, **kwargs):
         super(bundlephasecache, self).__init__(*args, **kwargs)
-        if util.safehasattr(self, 'opener'):
+        if hasattr(self, 'opener'):
             self.opener = vfsmod.readonlyvfs(self.opener)
 
     def write(self):
@@ -1043,7 +1043,7 b' def _resolvenarrowrevisioninfo('
                 return i
         # We failed to resolve a parent for this node, so
         # we crash the changegroup construction.
-        if util.safehasattr(store, 'target'):
+        if hasattr(store, 'target'):
             target = store.display_id
         else:
             # some revlog not actually a revlog
@@ -236,7 +236,7 b' def _newchgui(srcui, csystem, attachio):'
         # will behave differently (i.e. write to stdout).
         if (
             out is not self.fout
-            or not util.safehasattr(self.fout, 'fileno')
+            or not hasattr(self.fout, 'fileno')
             or self.fout.fileno() != procutil.stdout.fileno()
             or self._finoutredirected
         ):
@@ -262,7 +262,7 b' def _loadnewui(srcui, args, cdebug):'
     newui = srcui.__class__.load()
     for a in ['fin', 'fout', 'ferr', 'environ']:
         setattr(newui, a, getattr(srcui, a))
-    if util.safehasattr(srcui, '_csystem'):
+    if hasattr(srcui, '_csystem'):
         newui._csystem = srcui._csystem
 
     # command line args
@@ -603,7 +603,7 b' class chgcmdserver(commandserver.server)'
         }
     )
 
-    if util.safehasattr(procutil, 'setprocname'):
+    if hasattr(procutil, 'setprocname'):
 
         def setprocname(self):
             """Change process title"""
@@ -1449,7 +1449,7 b' def openstorage(repo, cmd, file_, opts, '
     if returnrevlog:
         if isinstance(r, revlog.revlog):
             pass
-        elif util.safehasattr(r, '_revlog'):
+        elif hasattr(r, '_revlog'):
             r = r._revlog  # pytype: disable=attribute-error
         elif r is not None:
             raise error.InputError(
@@ -332,7 +332,7 b' class server:'
         # any kind of interaction must use server channels, but chg may
         # replace channels by fully functional tty files. so nontty is
         # enforced only if cin is a channel.
-        if not util.safehasattr(self.cin, 'fileno'):
+        if not hasattr(self.cin, 'fileno'):
             ui.setconfig(b'ui', b'nontty', b'true', b'commandserver')
 
         req = dispatch.request(
@@ -384,7 +384,7 b' class server:'
         if self.cmsg:
             hellomsg += b'message-encoding: %s\n' % self.cmsg.encoding
         hellomsg += b'pid: %d' % procutil.getpid()
-        if util.safehasattr(os, 'getpgid'):
+        if hasattr(os, 'getpgid'):
             hellomsg += b'\n'
             hellomsg += b'pgid: %d' % os.getpgid(0)
 
@@ -559,7 +559,7 b' class unixforkingservice:'
         self.ui = ui
         self.repo = repo
         self.address = opts[b'address']
-        if not util.safehasattr(socket, 'AF_UNIX'):
+        if not hasattr(socket, 'AF_UNIX'):
             raise error.Abort(_(b'unsupported platform'))
         if not self.address:
             raise error.Abort(_(b'no socket path specified with --address'))
@@ -588,7 +588,7 b' class unixforkingservice:'
         o = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM)
         self._mainipc, self._workeripc = o
         self._servicehandler.bindsocket(self._sock, self.address)
-        if util.safehasattr(procutil, 'unblocksignal'):
+        if hasattr(procutil, 'unblocksignal'):
             procutil.unblocksignal(signal.SIGCHLD)
             o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
             self._oldsigchldhandler = o
@@ -573,7 +573,7 b' def chunkselector(ui, headerlist, operat'
     ui.write(_(b'starting interactive selection\n'))
     chunkselector = curseschunkselector(headerlist, ui, operation)
     origsigtstp = sentinel = object()
-    if util.safehasattr(signal, 'SIGTSTP'):
+    if hasattr(signal, 'SIGTSTP'):
         origsigtstp = signal.getsignal(signal.SIGTSTP)
     try:
         with util.with_lc_ctype():
@@ -1944,7 +1944,7 b' are you sure you want to review/edit and'
         """
 
         origsigwinch = sentinel = object()
-        if util.safehasattr(signal, 'SIGWINCH'):
+        if hasattr(signal, 'SIGWINCH'):
             origsigwinch = signal.signal(signal.SIGWINCH, self.sigwinchhandler)
         try:
             return self._main(stdscr)
@@ -1282,7 +1282,7 b' def debugdiscovery(ui, repo, remoteurl=b'
     if opts.get(b'old'):
 
         def doit(pushedrevs, remoteheads, remote=remote):
-            if not util.safehasattr(remote, 'branches'):
+            if not hasattr(remote, 'branches'):
                 # enable in-client legacy support
                 remote = localrepo.locallegacypeer(remote.local())
             if remote_revs:
@@ -1482,7 +1482,7 b' def debugextensions(ui, repo, **opts):'
         isinternal = extensions.ismoduleinternal(extmod)
         extsource = None
 
-        if util.safehasattr(extmod, '__file__'):
+        if hasattr(extmod, '__file__'):
             extsource = pycompat.fsencode(extmod.__file__)
         elif getattr(sys, 'oxidized', False):
             extsource = pycompat.sysexecutable
@@ -1722,7 +1722,7 b' def debugformat(ui, repo, **opts):'
     if fm.isplain():
 
         def formatvalue(value):
-            if util.safehasattr(value, 'startswith'):
+            if hasattr(value, 'startswith'):
                 return value
             if value:
                 return b'yes'
@@ -1947,7 +1947,7 b' def debugindexstats(ui, repo):'
     """show stats related to the changelog index"""
     repo.changelog.shortest(repo.nullid, 1)
     index = repo.changelog.index
-    if not util.safehasattr(index, 'stats'):
+    if not hasattr(index, 'stats'):
         raise error.Abort(_(b'debugindexstats only works with native code'))
     for k, v in sorted(index.stats().items()):
         ui.write(b'%s: %d\n' % (k, v))
@@ -1983,7 +1983,7 b' def debuginstall(ui, **opts):'
 
     # Python
     pythonlib = None
-    if util.safehasattr(os, '__file__'):
+    if hasattr(os, '__file__'):
         pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
     elif getattr(sys, 'oxidized', False):
         pythonlib = pycompat.sysexecutable
@@ -2065,7 +2065,7 b' def debuginstall(ui, **opts):'
 
     # compiled modules
     hgmodules = None
-    if util.safehasattr(sys.modules[__name__], '__file__'):
+    if hasattr(sys.modules[__name__], '__file__'):
         hgmodules = os.path.dirname(pycompat.fsencode(__file__))
     elif getattr(sys, 'oxidized', False):
         hgmodules = pycompat.sysexecutable
@@ -2649,7 +2649,7 b' def debugnodemap(ui, repo, file_=None, *'
     if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
         r = r._revlog
     if opts['dump_new']:
-        if util.safehasattr(r.index, "nodemap_data_all"):
+        if hasattr(r.index, "nodemap_data_all"):
             data = r.index.nodemap_data_all()
         else:
             data = nodemap.persistent_data(r.index)
@@ -377,7 +377,7 b' class dirstatemap(_dirstatemapcommon):'
             return
 
         # TODO: adjust this estimate for dirstate-v2
-        if util.safehasattr(parsers, 'dict_new_presized'):
+        if hasattr(parsers, 'dict_new_presized'):
             # Make an estimate of the number of files in the dirstate based on
             # its size. This trades wasting some memory for avoiding costly
             # resizes. Each entry have a prefix of 17 bytes followed by one or
@@ -107,7 +107,7 b' class request:'
 def _flushstdio(ui, err):
     status = None
     # In all cases we try to flush stdio streams.
-    if util.safehasattr(ui, 'fout'):
+    if hasattr(ui, 'fout'):
         assert ui is not None  # help pytype
         assert ui.fout is not None  # help pytype
         try:
@@ -116,7 +116,7 b' def _flushstdio(ui, err):'
             err = e
             status = -1
 
-    if util.safehasattr(ui, 'ferr'):
+    if hasattr(ui, 'ferr'):
         assert ui is not None  # help pytype
         assert ui.ferr is not None  # help pytype
         try:
@@ -170,7 +170,7 b' def initstdio():'
             "newline": "\n",
             "line_buffering": sys.stdout.line_buffering,
         }
-        if util.safehasattr(sys.stdout, "write_through"):
+        if hasattr(sys.stdout, "write_through"):
             # pytype: disable=attribute-error
             kwargs["write_through"] = sys.stdout.write_through
             # pytype: enable=attribute-error
@@ -183,7 +183,7 b' def initstdio():'
             "newline": "\n",
             "line_buffering": sys.stderr.line_buffering,
         }
-        if util.safehasattr(sys.stderr, "write_through"):
+        if hasattr(sys.stderr, "write_through"):
             # pytype: disable=attribute-error
             kwargs["write_through"] = sys.stderr.write_through
             # pytype: enable=attribute-error
@@ -520,7 +520,7 b' def _callcatch(ui, func):'
 def aliasargs(fn, givenargs):
     args = []
     # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
-    if not util.safehasattr(fn, '_origfunc'):
+    if not hasattr(fn, '_origfunc'):
         args = getattr(fn, 'args', args)
     if args:
         cmd = b' '.join(map(procutil.shellquote, args))
@@ -708,7 +708,7 b' class cmdalias:'
         }
         if name not in adefaults:
             raise AttributeError(name)
-        if self.badalias or util.safehasattr(self, 'shell'):
+        if self.badalias or hasattr(self, 'shell'):
             return adefaults[name]
         return getattr(self.fn, name)
 
@@ -734,7 +734,7 b' class cmdalias:'
             self.name,
             self.definition,
         )
-        if util.safehasattr(self, 'shell'):
+        if hasattr(self, 'shell'):
             return self.fn(ui, *args, **opts)
         else:
             try:
@@ -1024,7 +1024,7 b' def _checkshellalias(lui, ui, args):'
     cmd = aliases[0]
     fn = entry[0]
 
-    if cmd and util.safehasattr(fn, 'shell'):
+    if cmd and hasattr(fn, 'shell'):
         # shell alias shouldn't receive early options which are consumed by hg
         _earlyopts, args = _earlysplitopts(args)
         d = lambda: fn(ui, *args[1:])
@@ -172,7 +172,7 b' def _validatecmdtable(ui, cmdtable):'
     """Check if extension commands have required attributes"""
     for c, e in cmdtable.items():
         f = e[0]
-        missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
+        missing = [a for a in _cmdfuncattrs if not hasattr(f, a)]
         if not missing:
             continue
         msg = b'missing attributes: %s'
@@ -742,7 +742,7 b' def _disabledpaths():'
 
     # The hgext might not have a __file__ attribute (e.g. in PyOxidizer) and
     # it might not be on a filesystem even if it does.
-    if util.safehasattr(hgext, '__file__'):
+    if hasattr(hgext, '__file__'):
         extpath = os.path.dirname(
             util.abspath(pycompat.fsencode(hgext.__file__))
         )
@@ -857,7 +857,7 b' def disabled_help(name):'
 
         # The extensions are filesystem based, so either an error occurred
         # or all are enabled.
-        if util.safehasattr(hgext, '__file__'):
+        if hasattr(hgext, '__file__'):
             return
 
         if name in _order:  # enabled
@@ -987,13 +987,13 b' def notloaded():'
 
 def moduleversion(module):
     '''return version information from given module as a string'''
-    if util.safehasattr(module, 'getversion') and callable(module.getversion):
+    if hasattr(module, 'getversion') and callable(module.getversion):
         try:
             version = module.getversion()
         except Exception:
             version = b'unknown'
 
-    elif util.safehasattr(module, '__version__'):
+    elif hasattr(module, '__version__'):
         version = module.__version__
     else:
         version = b''
@@ -43,7 +43,6 b' from . import ('
     templatefuncs,
     templatekw,
     ui as uimod,
-    util,
 )
 from .hgweb import webcommands
 from .utils import (
@@ -810,7 +809,7 b' def help_('
         doc = gettext(pycompat.getdoc(entry[0]))
         if not doc:
             doc = _(b"(no help text available)")
-        if util.safehasattr(entry[0], 'definition'):  # aliased command
+        if hasattr(entry[0], 'definition'):  # aliased command
             source = entry[0].source
             if entry[0].definition.startswith(b'!'):  # shell alias
                 doc = _(b'shell alias for: %s\n\n%s\n\ndefined by: %s\n') % (
@@ -66,7 +66,7 b" sharedbookmarks = b'bookmarks'"
 
 
 def addbranchrevs(lrepo, other, branches, revs, remotehidden=False):
-    if util.safehasattr(other, 'peer'):
+    if hasattr(other, 'peer'):
         # a courtesy to callers using a localrepo for other
         peer = other.peer(remotehidden=remotehidden)
     else:
@@ -174,7 +174,7 b' def islocal(repo):'
             cls.instance  # make sure we load the module
         else:
             cls = LocalFactory
-        if util.safehasattr(cls, 'islocal'):
+        if hasattr(cls, 'islocal'):
             return cls.islocal(repo)  # pytype: disable=module-attr
         return False
     repo.ui.deprecwarn(b"use obj.local() instead of islocal(obj)", b"6.4")
@@ -254,7 +254,7 b' def peer('
     '''return a repository peer for the specified path'''
     ui = getattr(uiorrepo, 'ui', uiorrepo)
     rui = remoteui(uiorrepo, opts)
-    if util.safehasattr(path, 'url'):
+    if hasattr(path, 'url'):
         # this is already a urlutil.path object
         peer_path = path
     else:
@@ -317,7 +317,7 b' def sharedreposource(repo):'
     if repo.sharedpath == repo.path:
         return None
 
-    if util.safehasattr(repo, 'srcrepo') and repo.srcrepo:
+    if hasattr(repo, 'srcrepo') and repo.srcrepo:
         return repo.srcrepo
 
     # the sharedpath always ends in the .hg; we want the path to the repo
@@ -340,7 +340,7 b' def share('
     '''create a shared repository'''
 
     not_local_msg = _(b'can only share local repositories')
-    if util.safehasattr(source, 'local'):
+    if hasattr(source, 'local'):
         if source.local() is None:
             raise error.Abort(not_local_msg)
     elif not islocal(source):
@@ -729,7 +729,7 b' def clone('
         branches = (src_path.branch, branch or [])
         source = src_path.loc
     else:
-        if util.safehasattr(source, 'peer'):
+        if hasattr(source, 'peer'):
             srcpeer = source.peer()  # in case we were called with a localrepo
         else:
             srcpeer = source
@@ -1567,7 +1567,7 b' def verify(repo, level=None):'
 
 def remoteui(src, opts):
     """build a remote ui from ui or repo and opts"""
-    if util.safehasattr(src, 'baseui'):  # looks like a repository
+    if hasattr(src, 'baseui'):  # looks like a repository
         dst = src.baseui.copy()  # drop repo-specific config
         src = src.ui  # copy target options from repo
     else:  # assume it's a global ui object
@@ -34,7 +34,6 b' from .. import ('
     templater,
     templateutil,
     ui as uimod,
-    util,
     wireprotoserver,
 )
 
@@ -403,7 +402,7 b' class hgweb:'
             cmd = cmd[style + 1 :]
 
         # avoid accepting e.g. style parameter as command
-        if util.safehasattr(webcommands, pycompat.sysstr(cmd)):
+        if hasattr(webcommands, pycompat.sysstr(cmd)):
             req.qsparams[b'cmd'] = cmd
 
         if cmd == b'static':
@@ -478,7 +477,7 b' class hgweb:'
 
         except (error.LookupError, error.RepoLookupError) as err:
             msg = pycompat.bytestr(err)
-            if util.safehasattr(err, 'name') and not isinstance(
+            if hasattr(err, 'name') and not isinstance(
                 err, error.ManifestLookupError
             ):
                 msg = b'revision not found: %s' % err.name
@@ -100,7 +100,7 b' class _httprequesthandler(httpservermod.'
 
     def log_request(self, code='-', size='-'):
         xheaders = []
-        if util.safehasattr(self, 'headers'):
+        if hasattr(self, 'headers'):
             xheaders = [
                 h for h in self.headers.items() if h[0].startswith('x-')
             ]
@@ -214,7 +214,7 b' class _httprequesthandler(httpservermod.'
         env['wsgi.multithread'] = isinstance(
             self.server, socketserver.ThreadingMixIn
         )
-        if util.safehasattr(socketserver, 'ForkingMixIn'):
+        if hasattr(socketserver, 'ForkingMixIn'):
             env['wsgi.multiprocess'] = isinstance(
                 self.server, socketserver.ForkingMixIn
             )
@@ -344,7 +344,7 b' try:'
     threading.active_count()  # silence pyflakes and bypass demandimport
     _mixin = socketserver.ThreadingMixIn
 except ImportError:
-    if util.safehasattr(os, "fork"):
+    if hasattr(os, "fork"):
         _mixin = socketserver.ForkingMixIn
     else:
 
@@ -211,7 +211,7 b' def _ctxsgen(context, ctxs):'
             b'description': s.description(),
             b'branch': s.branch(),
         }
-        if util.safehasattr(s, 'path'):
+        if hasattr(s, 'path'):
             d[b'file'] = s.path()
         yield d
 
@@ -65,7 +65,7 b' def encodevalueinheaders(value, header, '
 class _multifile:
     def __init__(self, *fileobjs):
         for f in fileobjs:
-            if not util.safehasattr(f, 'length'):
+            if not hasattr(f, 'length'):
                 raise ValueError(
                     b'_multifile only supports file objects that '
                     b'have a length but this one does not:',
@@ -180,7 +180,7 b' def makev1commandrequest('
     qs = b'?%s' % urlreq.urlencode(q)
     cu = b"%s%s" % (repobaseurl, qs)
     size = 0
-    if util.safehasattr(data, 'length'):
+    if hasattr(data, 'length'):
         size = data.length
     elif data is not None:
         size = len(data)
@@ -420,7 +420,7 b' class localpeer(repository.peer):'
         try:
             bundle = exchange.readbundle(self.ui, bundle, None)
             ret = exchange.unbundle(self._repo, bundle, heads, b'push', url)
-            if util.safehasattr(ret, 'getchunks'):
+            if hasattr(ret, 'getchunks'):
                 # This is a bundle20 object, turn it into an unbundler.
                 # This little dance should be dropped eventually when the
                 # API is finally improved.
@@ -1461,7 +1461,7 b' class localrepository:'
         if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
             b'devel', b'check-locks'
         ):
-            if util.safehasattr(self.svfs, 'vfs'):  # this is filtervfs
+            if hasattr(self.svfs, 'vfs'):  # this is filtervfs
                 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
             else:  # standard vfs
                 self.svfs.audit = self._getsvfsward(self.svfs.audit)
@@ -1523,8 +1523,8 b' class localrepository:'
             repo = rref()
             if (
                 repo is None
-                or not util.safehasattr(repo, '_wlockref')
-                or not util.safehasattr(repo, '_lockref')
+                or not hasattr(repo, '_wlockref')
+                or not hasattr(repo, '_lockref')
             ):
                 return
             if mode in (None, b'r', b'rb'):
@@ -1572,7 +1572,7 b' class localrepository:'
         def checksvfs(path, mode=None):
             ret = origfunc(path, mode=mode)
             repo = rref()
-            if repo is None or not util.safehasattr(repo, '_lockref'):
+            if repo is None or not hasattr(repo, '_lockref'):
                 return
             if mode in (None, b'r', b'rb'):
                 return
@@ -1628,7 +1628,7 b' class manifestrevlog:'
 
     def _setupmanifestcachehooks(self, repo):
         """Persist the manifestfulltextcache on lock release"""
-        if not util.safehasattr(repo, '_wlockref'):
+        if not hasattr(repo, '_wlockref'):
             return
 
         self._fulltextcache._opener = repo.wcachevfs
@@ -211,11 +211,7 b' def blocksinrange(blocks, rangeb):'
 
 
 def chooseblocksfunc(opts=None):
-    if (
-        opts is None
-        or not opts.xdiff
-        or not util.safehasattr(bdiff, 'xdiffblocks')
-    ):
+    if opts is None or not opts.xdiff or not hasattr(bdiff, 'xdiffblocks'):
         return bdiff.blocks
     else:
         return bdiff.xdiffblocks
@@ -168,7 +168,7 b' def split(stream):'
 
     mimeheaders = [b'content-type']
 
-    if not util.safehasattr(stream, 'next'):
+    if not hasattr(stream, 'next'):
         # http responses, for example, have readline but not next
         stream = fiter(stream)
 
@@ -1703,7 +1703,7 b' def reversehunks(hunks):'
 
     newhunks = []
     for c in hunks:
-        if util.safehasattr(c, 'reversehunk'):
+        if hasattr(c, 'reversehunk'):
             c = c.reversehunk()
         newhunks.append(c)
     return newhunks
@@ -377,7 +377,7 b' class dirs:'
         return d in self._dirs
 
 
-if util.safehasattr(parsers, 'dirs'):
+if hasattr(parsers, 'dirs'):
     dirs = parsers.dirs
 
 if rustdirs is not None:
@@ -159,7 +159,7 b' def _flipbit(v, node):'
 def ctxpvec(ctx):
     '''construct a pvec for ctx while filling in the cache'''
     r = ctx.repo()
-    if not util.safehasattr(r, "_pveccache"):
+    if not hasattr(r, "_pveccache"):
         r._pveccache = {}
     pvc = r._pveccache
     if ctx.rev() not in pvc:
@@ -10,7 +10,6 b' from . import ('
     configitems,
     error,
     pycompat,
-    util,
 )
 
 # unlike the other registered items, config options are neither functions or
@@ -64,7 +63,7 b' class _funcregistrarbase:'
             msg = b'duplicate registration for name: "%s"' % name
             raise error.ProgrammingError(msg)
 
-        if func.__doc__ and not util.safehasattr(func, '_origdoc'):
+        if func.__doc__ and not hasattr(func, '_origdoc'):
             func._origdoc = func.__doc__.strip()
             doc = pycompat.sysbytes(func._origdoc)
             func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
@@ -296,13 +296,12 b' class filteredchangelogmixin:'
         This returns a version of 'revs' to be used thereafter by the caller.
         In particular, if revs is an iterator, it is converted into a set.
         """
-        safehasattr = util.safehasattr
-        if safehasattr(revs, '__next__'):
+        if hasattr(revs, '__next__'):
             # Note that inspect.isgenerator() is not true for iterators,
             revs = set(revs)
 
         filteredrevs = self.filteredrevs
-        if safehasattr(revs, 'first'):  # smartset
+        if hasattr(revs, 'first'):  # smartset
             offenders = revs & filteredrevs
         else:
             offenders = filteredrevs.intersection(revs)
@@ -167,7 +167,7 b' def _verify_revision(rl, skipflags, stat'
 # We also consider we have a "fast" implementation in "pure" python because
 # people using pure don't really have performance consideration (and a
 # wheelbarrow of other slowness source)
-HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or util.safehasattr(
+HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or hasattr(
     parsers, 'BaseIndexObject'
 )
 
@@ -214,7 +214,7 b' def parse_index_cl_v2(data, inline):'
     return index, cache
 
 
-if util.safehasattr(parsers, 'parse_index_devel_nodemap'):
+if hasattr(parsers, 'parse_index_devel_nodemap'):
 
     def parse_index_v1_nodemap(data, inline):
         index, cache = parsers.parse_index_devel_nodemap(data, inline)
@@ -730,7 +730,7 b' class revlog:'
         use_nodemap = (
             not self._inline
             and self._nodemap_file is not None
-            and util.safehasattr(index, 'update_nodemap_data')
+            and hasattr(index, 'update_nodemap_data')
         )
         if use_nodemap:
             nodemap_data = nodemaputil.persisted_data(self)
@@ -911,7 +911,7 b' class revlog:'
         use_nodemap = (
             not self._inline
             and self._nodemap_file is not None
-            and util.safehasattr(self.index, 'update_nodemap_data')
+            and hasattr(self.index, 'update_nodemap_data')
         )
         if use_nodemap:
             nodemap_data = nodemaputil.persisted_data(self)
@@ -1887,7 +1887,7 b' class revlog:'
         """tells whether rev is a snapshot"""
         if not self._sparserevlog:
             return self.deltaparent(rev) == nullrev
-        elif util.safehasattr(self.index, 'issnapshot'):
+        elif hasattr(self.index, 'issnapshot'):
             # directly assign the method to cache the testing and access
             self.issnapshot = self.index.issnapshot
             return self.issnapshot(rev)
@@ -13,7 +13,6 b' from .. import ('
     mdiff,
     node as nodemod,
     revlogutils,
-    util,
 )
 
 from . import (
@@ -409,7 +408,7 b' def debug_revlog(ui, revlog):'
             numother_nad += 1
 
         # Obtain data on the raw chunks in the revlog.
-        if util.safehasattr(r, '_getsegmentforrevs'):
+        if hasattr(r, '_getsegmentforrevs'):
             segment = r._getsegmentforrevs(rev, rev)[1]
         else:
             segment = r._revlog._getsegmentforrevs(rev, rev)[1]
@@ -1060,7 +1060,7 b' class SnapshotCache:'
             end_rev < self._start_rev or end_rev > self._end_rev
         ), (self._start_rev, self._end_rev, start_rev, end_rev)
         cache = self.snapshots
-        if util.safehasattr(revlog.index, 'findsnapshots'):
+        if hasattr(revlog.index, 'findsnapshots'):
             revlog.index.findsnapshots(cache, start_rev, end_rev)
         else:
             deltaparent = revlog.deltaparent
@@ -174,9 +174,9 b' def persist_nodemap(tr, revlog, pending='
         msg = "calling persist nodemap on a revlog without the feature enabled"
         raise error.ProgrammingError(msg)
 
-    can_incremental = util.safehasattr(revlog.index, "nodemap_data_incremental")
+    can_incremental = hasattr(revlog.index, "nodemap_data_incremental")
     ondisk_docket = revlog._nodemap_docket
-    feed_data = util.safehasattr(revlog.index, "update_nodemap_data")
+    feed_data = hasattr(revlog.index, "update_nodemap_data")
     use_mmap = revlog.opener.options.get(b"persistent-nodemap.mmap")
 
     data = None
@@ -216,7 +216,7 b' def persist_nodemap(tr, revlog, pending='
         # otherwise fallback to a full new export
         target_docket = NodeMapDocket()
         datafile = _rawdata_filepath(revlog, target_docket)
-        if util.safehasattr(revlog.index, "nodemap_data_all"):
+        if hasattr(revlog.index, "nodemap_data_all"):
             data = revlog.index.nodemap_data_all()
         else:
             data = persistent_data(revlog.index)
@@ -21,7 +21,6 b' from . import ('
     obsutil,
     revset,
     scmutil,
-    util,
 )
 
 
@@ -77,7 +76,7 b" def precheck(repo, revs, action=b'rewrit"
         hint = _(b"no changeset checked out")
         raise error.InputError(msg, hint=hint)
 
-    if any(util.safehasattr(r, 'rev') for r in revs):
+    if any(hasattr(r, 'rev') for r in revs):
         repo.ui.develwarn(b"rewriteutil.precheck called with ctx not revs")
         revs = (r.rev() for r in revs)
 
@@ -233,11 +233,7 b' def callcatch(ui, func):'
         reason = encoding.unitolocal(reason)
         ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
     except (IOError, OSError) as inst:
-        if (
-            util.safehasattr(inst, "args")
-            and inst.args
-            and inst.args[0] == errno.EPIPE
-        ):
+        if hasattr(inst, "args") and inst.args and inst.args[0] == errno.EPIPE:
             pass
         elif getattr(inst, "strerror", None):  # common IOError or OSError
             if getattr(inst, "filename", None) is not None:
@@ -561,11 +557,11 b' def shortesthexnodeidprefix(repo, node, ' | |||
|
561 | 557 | if cache is not None: |
|
562 | 558 | nodetree = cache.get(b'disambiguationnodetree') |
|
563 | 559 | if not nodetree: |
|
564 | if util.safehasattr(parsers, 'nodetree'): |

560 | if hasattr(parsers, 'nodetree'): | |
|
565 | 561 | # The CExt is the only implementation to provide a nodetree |
|
566 | 562 | # class so far. |
|
567 | 563 | index = cl.index |
|
568 | if util.safehasattr(index, 'get_cindex'): |

564 | if hasattr(index, 'get_cindex'): | |
|
569 | 565 | # the rust wrapper needs to give access to its internal index |
|
570 | 566 | index = index.get_cindex() |
|
571 | 567 | nodetree = parsers.nodetree(index, len(revs)) |
@@ -1066,7 +1062,7 b' def cleanupnodes(' | |||
|
1066 | 1062 | return |
|
1067 | 1063 | |
|
1068 | 1064 | # translate mapping's other forms |
|
1069 | if not util.safehasattr(replacements, 'items'): |

1065 | if not hasattr(replacements, 'items'): | |
|
1070 | 1066 | replacements = {(n,): () for n in replacements} |
|
1071 | 1067 | else: |
|
1072 | 1068 | # upgrading non tuple "source" to tuple ones for BC |
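
cleanupnodes() accepts either a bare iterable of old nodes or a mapping {old: new}, and the hunk above duck-types on .items() to tell them apart. A simplified sketch of that normalization (the tuple upgrade mirrors the "upgrading non tuple source" comment):

    def normalize_replacements(replacements):
        if not hasattr(replacements, 'items'):
            # iterable of nodes: everything maps to "deleted"
            return {(n,): () for n in replacements}
        # upgrade non-tuple keys to 1-tuples for a uniform shape
        return {
            k if isinstance(k, tuple) else (k,): v
            for k, v in replacements.items()
        }

    print(normalize_replacements([b'n1', b'n2']))
    print(normalize_replacements({b'old': (b'new',)}))
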
@@ -516,7 +516,7 b' def mutableancestors(ctx):' | |||
|
516 | 516 | |
|
517 | 517 | def getcommitfunc(extra, interactive, editor=False): |
|
518 | 518 | def commitfunc(ui, repo, message, match, opts): |
|
519 | hasmq = util.safehasattr(repo, 'mq') |

519 | hasmq = hasattr(repo, 'mq') | |
|
520 | 520 | if hasmq: |
|
521 | 521 | saved, repo.mq.checkapplied = repo.mq.checkapplied, False |
|
522 | 522 |
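
The commitfunc() hunk shows a save/override/restore dance around an optional extension attribute: when mq is loaded, its "patches applied" check is silenced for the duration of the commit and then restored. A generic sketch of the pattern (stand-in classes, not the real mq API):

    class MQ:
        checkapplied = True

    class Repo:
        mq = MQ()

    def run_with_mq_silenced(repo, fn):
        hasmq = hasattr(repo, 'mq')
        if hasmq:
            saved, repo.mq.checkapplied = repo.mq.checkapplied, False
        try:
            return fn()
        finally:
            # restore even if fn() raised
            if hasmq:
                repo.mq.checkapplied = saved

    print(run_with_mq_silenced(Repo(), lambda: 'committed'))
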
@@ -137,7 +137,7 b' class abstractsmartset:' | |||
|
137 | 137 | |
|
138 | 138 | This is part of the mandatory API for smartset.""" |
|
139 | 139 | # builtin cannot be cached. but does not need to be |
|
140 | if cache and util.safehasattr(condition, '__code__'): |

140 | if cache and hasattr(condition, '__code__'): | |
|
141 | 141 | condition = util.cachefunc(condition) |
|
142 | 142 | return filteredset(self, condition, condrepr) |
|
143 | 143 | |
@@ -1127,7 +1127,7 b' class fullreposet(_spanset):' | |||
|
1127 | 1127 | This boldly assumes the other contains valid revs only. |
|
1128 | 1128 | """ |
|
1129 | 1129 | # other is not a smartset, make it so |
|
1130 | if not util.safehasattr(other, 'isascending'): |

1130 | if not hasattr(other, 'isascending'): | |
|
1131 | 1131 | # filter out hidden revision |
|
1132 | 1132 | # (this boldly assumes all smartset are pure) |
|
1133 | 1133 | # |
@@ -50,11 +50,11 b" hassni = getattr(ssl, 'HAS_SNI', False)" | |||
|
50 | 50 | # were defined only if compiled against an OpenSSL version with TLS 1.1 / 1.2 |
|
51 | 51 | # support. At the mentioned commit, they were unconditionally defined. |
|
52 | 52 | supportedprotocols = set() |
|
53 | if getattr(ssl, 'HAS_TLSv1', util.safehasattr(ssl, 'PROTOCOL_TLSv1')): |

53 | if getattr(ssl, 'HAS_TLSv1', hasattr(ssl, 'PROTOCOL_TLSv1')): | |
|
54 | 54 | supportedprotocols.add(b'tls1.0') |
|
55 | if getattr(ssl, 'HAS_TLSv1_1', util.safehasattr(ssl, 'PROTOCOL_TLSv1_1')): |

55 | if getattr(ssl, 'HAS_TLSv1_1', hasattr(ssl, 'PROTOCOL_TLSv1_1')): | |
|
56 | 56 | supportedprotocols.add(b'tls1.1') |
|
57 | if getattr(ssl, 'HAS_TLSv1_2', util.safehasattr(ssl, 'PROTOCOL_TLSv1_2')): |

57 | if getattr(ssl, 'HAS_TLSv1_2', hasattr(ssl, 'PROTOCOL_TLSv1_2')): | |
|
58 | 58 | supportedprotocols.add(b'tls1.2') |
|
59 | 59 | |
|
60 | 60 | |
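
These three ssl hunks combine both probing styles: getattr() with a default for the authoritative HAS_TLSvX flags, plus a hasattr() probe of the legacy PROTOCOL_TLSvX constants as that default. Note the default expression is evaluated eagerly either way. A runnable sketch of one of them:

    import ssl

    supportedprotocols = set()
    # Trust ssl.HAS_TLSv1_2 when the interpreter provides it; otherwise
    # infer support from the presence of the deprecated constant.
    if getattr(ssl, 'HAS_TLSv1_2', hasattr(ssl, 'PROTOCOL_TLSv1_2')):
        supportedprotocols.add(b'tls1.2')
    print(supportedprotocols)
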
@@ -312,7 +312,7 b' def wrapsocket(sock, keyfile, certfile, ' | |||
|
312 | 312 | # is loaded and contains that removed CA, you've just undone the user's |
|
313 | 313 | # choice. |
|
314 | 314 | |
|
315 | if util.safehasattr(ssl, 'TLSVersion'): |

315 | if hasattr(ssl, 'TLSVersion'): | |
|
316 | 316 | # python 3.7+ |
|
317 | 317 | sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) |
|
318 | 318 | minimumprotocol = settings[b'minimumprotocol'] |
@@ -419,7 +419,7 b' def wrapsocket(sock, keyfile, certfile, ' | |||
|
419 | 419 | pass |
|
420 | 420 | |
|
421 | 421 | # Try to print more helpful error messages for known failures. |
|
422 | if util.safehasattr(e, 'reason'): |

422 | if hasattr(e, 'reason'): | |
|
423 | 423 | # This error occurs when the client and server don't share a |
|
424 | 424 | # common/supported SSL/TLS protocol. We've disabled SSLv2 and SSLv3 |
|
425 | 425 | # outright. Hopefully the reason for this error is that we require |
@@ -546,7 +546,7 b' def wrapserversocket(' | |||
|
546 | 546 | _(b'referenced certificate file (%s) does not exist') % f |
|
547 | 547 | ) |
|
548 | 548 | |
|
549 | if util.safehasattr(ssl, 'TLSVersion'): |

549 | if hasattr(ssl, 'TLSVersion'): | |
|
550 | 550 | # python 3.7+ |
|
551 | 551 | sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) |
|
552 | 552 | sslcontext.options |= getattr(ssl, 'OP_NO_COMPRESSION', 0) |
@@ -628,7 +628,7 b' def wrapserversocket(' | |||
|
628 | 628 | # Otherwise, use the list of more secure ciphers if found in the ssl module. |
|
629 | 629 | if exactprotocol: |
|
630 | 630 | sslcontext.set_ciphers('DEFAULT:@SECLEVEL=0') |
|
631 | elif util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'): |

631 | elif hasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'): | |
|
632 | 632 | sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0) |
|
633 | 633 | # pytype: disable=module-attr |
|
634 | 634 | sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS) |
@@ -428,7 +428,7 b' def consumev1(repo, fp, filecount, bytec' | |||
|
428 | 428 | with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount): |
|
429 | 429 | for i in range(filecount): |
|
430 | 430 | # XXX doesn't support '\n' or '\r' in filenames |
|
431 | if util.safehasattr(fp, 'readline'): |

431 | if hasattr(fp, 'readline'): | |
|
432 | 432 | l = fp.readline() |
|
433 | 433 | else: |
|
434 | 434 | # inline clonebundles use a chunkbuffer, so no readline |
@@ -12,7 +12,6 b' from . import (' | |||
|
12 | 12 | registrar, |
|
13 | 13 | repair, |
|
14 | 14 | scmutil, |
|
15 | util, | |
|
16 | 15 | ) |
|
17 | 16 | |
|
18 | 17 | release = lockmod.release |
@@ -36,7 +35,7 b' def _findupdatetarget(repo, nodes):' | |||
|
36 | 35 | currentbranch = repo[None].branch() |
|
37 | 36 | |
|
38 | 37 | if ( |
|
39 | util.safehasattr(repo, 'mq') |

38 | hasattr(repo, 'mq') | |
|
40 | 39 | and p2 != repo.nullid |
|
41 | 40 | and p2 in [x.node for x in repo.mq.applied] |
|
42 | 41 | ): |
@@ -1136,7 +1136,7 b' class svnsubrepo(abstractsubrepo):' | |||
|
1136 | 1136 | # --non-interactive. |
|
1137 | 1137 | if commands[0] in (b'update', b'checkout', b'commit'): |
|
1138 | 1138 | cmd.append(b'--non-interactive') |
|
1139 | if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'): |

1139 | if hasattr(subprocess, 'CREATE_NO_WINDOW'): | |
|
1140 | 1140 | # On Windows, prevent command prompt windows from popping up when |
|
1141 | 1141 | # running in pythonw. |
|
1142 | 1142 | extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW') |
@@ -1511,7 +1511,7 b' class gitsubrepo(abstractsubrepo):' | |||
|
1511 | 1511 | # the end of git diff arguments is used for paths |
|
1512 | 1512 | commands.insert(1, b'--color') |
|
1513 | 1513 | extrakw = {} |
|
1514 | if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'): |

1514 | if hasattr(subprocess, 'CREATE_NO_WINDOW'): | |
|
1515 | 1515 | # On Windows, prevent command prompt windows from popping up when |
|
1516 | 1516 | # running in pythonw. |
|
1517 | 1517 | extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW') |
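
Both subrepo hunks guard a Windows-only constant: subprocess.CREATE_NO_WINDOW does not exist on POSIX builds of Python, so the attribute probe doubles as a platform check. A self-contained sketch:

    import subprocess

    def quiet_popen_kwargs():
        # Only Windows builds define CREATE_NO_WINDOW; elsewhere the
        # extra keyword is omitted entirely.
        extrakw = {}
        if hasattr(subprocess, 'CREATE_NO_WINDOW'):
            extrakw['creationflags'] = subprocess.CREATE_NO_WINDOW
        return extrakw

    print(quiet_popen_kwargs())
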
@@ -384,7 +384,7 b' def repo_rel_or_abs_source(repo):' | |||
|
384 | 384 | Either absolute or relative the outermost repo""" |
|
385 | 385 | parent = repo |
|
386 | 386 | chunks = [] |
|
387 | while util.safehasattr(parent, '_subparent'): |

387 | while hasattr(parent, '_subparent'): | |
|
388 | 388 | source = urlutil.url(parent._subsource) |
|
389 | 389 | chunks.append(bytes(source)) |
|
390 | 390 | if source.isabs(): |
@@ -400,7 +400,7 b' def reporelpath(repo):' | |||
|
400 | 400 | # type: (localrepo.localrepository) -> bytes |
|
401 | 401 | """return path to this (sub)repo as seen from outermost repo""" |
|
402 | 402 | parent = repo |
|
403 | while util.safehasattr(parent, '_subparent'): |

403 | while hasattr(parent, '_subparent'): | |
|
404 | 404 | parent = parent._subparent |
|
405 | 405 | return repo.root[len(pathutil.normasprefix(parent.root)) :] |
|
406 | 406 | |
@@ -415,7 +415,7 b' def _abssource(repo, push=False, abort=T' | |||
|
415 | 415 | # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] |
|
416 | 416 | """return pull/push path of repo - either based on parent repo .hgsub info |
|
417 | 417 | or on the top repo config. Abort or return None if no source found.""" |
|
418 | if util.safehasattr(repo, '_subparent'): |

418 | if hasattr(repo, '_subparent'): | |
|
419 | 419 | source = urlutil.url(repo._subsource) |
|
420 | 420 | if source.isabs(): |
|
421 | 421 | return bytes(source) |
@@ -428,7 +428,7 b' def _abssource(repo, push=False, abort=T' | |||
|
428 | 428 | return bytes(parent) |
|
429 | 429 | else: # recursion reached top repo |
|
430 | 430 | path = None |
|
431 | if util.safehasattr(repo, '_subtoppath'): |

431 | if hasattr(repo, '_subtoppath'): | |
|
432 | 432 | path = repo._subtoppath |
|
433 | 433 | elif push and repo.ui.config(b'paths', b'default-push'): |
|
434 | 434 | path = repo.ui.config(b'paths', b'default-push') |
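
The subrepo hunks above all key off _subparent, an attribute only nested repositories carry, so "walk until hasattr fails" finds the outermost repo. A toy version with illustrative classes (not the real localrepository):

    class Repo:
        def __init__(self, root, parent=None):
            self.root = root
            if parent is not None:
                self._subparent = parent

    def outermost(repo):
        # Only subrepos carry _subparent; the loop stops at the top.
        while hasattr(repo, '_subparent'):
            repo = repo._subparent
        return repo

    top = Repo('/src/outer')
    sub = Repo('/src/outer/libs/inner', parent=top)
    assert outermost(sub) is top
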
@@ -339,14 +339,14 b' def json(obj, paranoid=True):' | |||
|
339 | 339 | raise error.ProgrammingError( |
|
340 | 340 | b'Mercurial only does output with bytes: %r' % obj |
|
341 | 341 | ) |
|
342 | elif util.safehasattr(obj, 'keys'): |

342 | elif hasattr(obj, 'keys'): | |
|
343 | 343 | out = [ |
|
344 | 344 | b'"%s": %s' |
|
345 | 345 | % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid)) |
|
346 | 346 | for k, v in sorted(obj.items()) |
|
347 | 347 | ] |
|
348 | 348 | return b'{' + b', '.join(out) + b'}' |
|
349 | elif util.safehasattr(obj, '__iter__'): |

349 | elif hasattr(obj, '__iter__'): | |
|
350 | 350 | out = [json(i, paranoid) for i in obj] |
|
351 | 351 | return b'[' + b', '.join(out) + b']' |
|
352 | 352 | raise error.ProgrammingError(b'cannot encode %r' % obj) |
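
The json() filter dispatches on shape rather than type: anything with .keys() becomes an object, anything else iterable becomes an array, and everything else is a programming error. A stripped-down str-based sketch of the same dispatch order (the real filter works on bytes and escapes its keys):

    def tojson(obj):
        if obj is None:
            return 'null'
        if isinstance(obj, bool):          # must precede the int check
            return 'true' if obj else 'false'
        if isinstance(obj, (int, float)):
            return str(obj)
        if isinstance(obj, str):
            return '"%s"' % obj
        if hasattr(obj, 'keys'):           # mapping-like
            return '{%s}' % ', '.join(
                '%s: %s' % (tojson(k), tojson(v))
                for k, v in sorted(obj.items())
            )
        if hasattr(obj, '__iter__'):       # any other iterable
            return '[%s]' % ', '.join(tojson(i) for i in obj)
        raise TypeError('cannot encode %r' % obj)

    print(tojson({'a': [1, 2, None], 'ok': True}))
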
@@ -281,7 +281,7 b' class hybrid(wrapped):' | |||
|
281 | 281 | |
|
282 | 282 | def getmember(self, context, mapping, key): |
|
283 | 283 | # TODO: maybe split hybrid list/dict types? |
|
284 | if not util.safehasattr(self._values, 'get'): |

284 | if not hasattr(self._values, 'get'): | |
|
285 | 285 | raise error.ParseError(_(b'not a dictionary')) |
|
286 | 286 | key = unwrapastype(context, mapping, key, self._keytype) |
|
287 | 287 | return self._wrapvalue(key, self._values.get(key)) |
@@ -301,13 +301,13 b' class hybrid(wrapped):' | |||
|
301 | 301 | def _wrapvalue(self, key, val): |
|
302 | 302 | if val is None: |
|
303 | 303 | return |
|
304 | if util.safehasattr(val, '_makemap'): |

304 | if hasattr(val, '_makemap'): | |
|
305 | 305 | # a nested hybrid list/dict, which has its own way of map operation |
|
306 | 306 | return val |
|
307 | 307 | return hybriditem(None, key, val, self._makemap) |
|
308 | 308 | |
|
309 | 309 | def filter(self, context, mapping, select): |
|
310 | if util.safehasattr(self._values, 'get'): |

310 | if hasattr(self._values, 'get'): | |
|
311 | 311 | values = { |
|
312 | 312 | k: v |
|
313 | 313 | for k, v in self._values.items() |
@@ -341,7 +341,7 b' class hybrid(wrapped):' | |||
|
341 | 341 | def tovalue(self, context, mapping): |
|
342 | 342 | # TODO: make it non-recursive for trivial lists/dicts |
|
343 | 343 | xs = self._values |
|
344 | if util.safehasattr(xs, 'get'): |

344 | if hasattr(xs, 'get'): | |
|
345 | 345 | return {k: unwrapvalue(context, mapping, v) for k, v in xs.items()} |
|
346 | 346 | return [unwrapvalue(context, mapping, x) for x in xs] |
|
347 | 347 | |
@@ -858,7 +858,7 b' def flatten(context, mapping, thing):' | |||
|
858 | 858 | ) |
|
859 | 859 | elif thing is None: |
|
860 | 860 | pass |
|
861 | elif not util.safehasattr(thing, '__iter__'): |

861 | elif not hasattr(thing, '__iter__'): | |
|
862 | 862 | yield pycompat.bytestr(thing) |
|
863 | 863 | else: |
|
864 | 864 | for i in thing: |
@@ -868,7 +868,7 b' def flatten(context, mapping, thing):' | |||
|
868 | 868 | yield i |
|
869 | 869 | elif i is None: |
|
870 | 870 | pass |
|
871 | elif not util.safehasattr(i, '__iter__'): |

871 | elif not hasattr(i, '__iter__'): | |
|
872 | 872 | yield pycompat.bytestr(i) |
|
873 | 873 | else: |
|
874 | 874 | for j in flatten(context, mapping, i): |
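
flatten() treats bytes and strings as atoms even though they are iterable, which is why the __iter__ probe is paired with explicit type checks earlier in the real function. A compact standalone equivalent of the idea:

    def flatten(thing):
        if thing is None:
            return
        # bytes/str are iterable but must be emitted whole
        if isinstance(thing, (bytes, str)) or not hasattr(thing, '__iter__'):
            yield thing
        else:
            for i in thing:
                yield from flatten(i)

    print(list(flatten(['a', ['b', None, ['c']], 'd'])))
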
@@ -1467,7 +1467,7 b' class ui:' | |||
|
1467 | 1467 | self.flush() |
|
1468 | 1468 | |
|
1469 | 1469 | wasformatted = self.formatted() |
|
1470 | if util.safehasattr(signal, "SIGPIPE"): |

1470 | if hasattr(signal, "SIGPIPE"): | |
|
1471 | 1471 | signal.signal(signal.SIGPIPE, _catchterm) |
|
1472 | 1472 | if self._runpager(pagercmd, pagerenv): |
|
1473 | 1473 | self.pageractive = True |
@@ -1547,7 +1547,7 b' class ui:' | |||
|
1547 | 1547 | |
|
1548 | 1548 | @self.atexit |
|
1549 | 1549 | def killpager(): |
|
1550 | if util.safehasattr(signal, "SIGINT"): |

1550 | if hasattr(signal, "SIGINT"): | |
|
1551 | 1551 | signal.signal(signal.SIGINT, signal.SIG_IGN) |
|
1552 | 1552 | # restore original fds, closing pager.stdin copies in the process |
|
1553 | 1553 | os.dup2(stdoutfd, procutil.stdout.fileno()) |
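
Both pager hunks probe the signal module because not every signal exists on every platform; SIGPIPE in particular is missing on Windows. A tiny sketch of the portable-signal idiom:

    import signal

    def reset_broken_pipe_handler():
        # SIGPIPE does not exist on Windows, so probe before using it.
        if hasattr(signal, 'SIGPIPE'):
            signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    reset_broken_pipe_handler()
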
@@ -671,7 +671,7 b' def determine_upgrade_actions(' | |||
|
671 | 671 | newactions = [] |
|
672 | 672 | |
|
673 | 673 | for d in format_upgrades: |
|
674 | if util.safehasattr(d, '_requirement'): |

674 | if hasattr(d, '_requirement'): | |
|
675 | 675 | name = d._requirement |
|
676 | 676 | else: |
|
677 | 677 | name = None |
@@ -190,7 +190,7 b' def _gen_sendfile(orgsend):' | |||
|
190 | 190 | return _sendfile |
|
191 | 191 | |
|
192 | 192 | |
|
193 | has_https = util.safehasattr(urlreq, 'httpshandler') |

193 | has_https = hasattr(urlreq, 'httpshandler') | |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | class httpconnection(keepalive.HTTPConnection): |
@@ -2583,7 +2583,7 b' class atomictempfile:' | |||
|
2583 | 2583 | self._fp.close() |
|
2584 | 2584 | |
|
2585 | 2585 | def __del__(self): |
|
2586 | if safehasattr(self, '_fp'): # constructor actually did something |

2586 | if hasattr(self, '_fp'): # constructor actually did something | |
|
2587 | 2587 | self.discard() |
|
2588 | 2588 | |
|
2589 | 2589 | def __enter__(self): |
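
The atomictempfile hunk guards __del__ against a half-constructed object: if __init__ raised before _fp was assigned, __del__ still runs, and an unguarded self._fp would raise a second error during cleanup. A demonstration with a stand-in class:

    class Resource:
        def __init__(self, fail=False):
            if fail:
                raise OSError('could not open')
            self._fp = open(__file__, 'rb')

        def __del__(self):
            # __init__ may have raised before _fp ever existed
            if hasattr(self, '_fp'):
                self._fp.close()

    r = Resource()
    del r  # closes the file
    try:
        Resource(fail=True)  # __del__ still runs; the guard protects it
    except OSError:
        pass
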
@@ -16,8 +16,6 b' from .. import (' | |||
|
16 | 16 | ) |
|
17 | 17 | from . import stringutil |
|
18 | 18 | |
|
19 | safehasattr = pycompat.safehasattr | |
|
20 | ||
|
21 | 19 | |
|
22 | 20 | _ = i18n._ |
|
23 | 21 | |
@@ -340,7 +338,7 b' class compressionengine:' | |||
|
340 | 338 | |
|
341 | 339 | class _CompressedStreamReader: |
|
342 | 340 | def __init__(self, fh): |
|
343 | if safehasattr(fh, 'unbufferedread'): |

341 | if hasattr(fh, 'unbufferedread'): | |
|
344 | 342 | self._reader = fh.unbufferedread |
|
345 | 343 | else: |
|
346 | 344 | self._reader = fh.read |
@@ -22,8 +22,8 b' def mainfrozen():' | |||
|
22 | 22 | (portable, not much used). |
|
23 | 23 | """ |
|
24 | 24 | return ( |
|
25 | pycompat.safehasattr(sys, "frozen") # new py2exe |

26 | or pycompat.safehasattr(sys, "importers") # old py2exe |

25 | hasattr(sys, "frozen") # new py2exe | |

26 | or hasattr(sys, "importers") # old py2exe | |
|
27 | 27 | or _imp.is_frozen("__main__") # tools/freeze |
|
28 | 28 | ) |
|
29 | 29 | |
@@ -59,7 +59,7 b' try:' | |||
|
59 | 59 | from importlib import resources # pytype: disable=import-error |
|
60 | 60 | |
|
61 | 61 | # Force loading of the resources module |
|
62 | if pycompat.safehasattr(resources, 'files'): |

62 | if hasattr(resources, 'files'): | |
|
63 | 63 | resources.files # pytype: disable=module-attr |
|
64 | 64 | else: |
|
65 | 65 | resources.open_binary # pytype: disable=module-attr |
@@ -95,7 +95,7 b' else:' | |||
|
95 | 95 | from .. import encoding |
|
96 | 96 | |
|
97 | 97 | def open_resource(package, name): |
|
98 | if pycompat.safehasattr(resources, 'files'): |

98 | if hasattr(resources, 'files'): | |
|
99 | 99 | return ( |
|
100 | 100 | resources.files( # pytype: disable=module-attr |
|
101 | 101 | pycompat.sysstr(package) |
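
Both resourceutil hunks pick between importlib.resources APIs: files() arrived in Python 3.9, while open_binary() is the older, since-deprecated interface. A hedged sketch of the same fallback for reading a data file out of a package (function name is illustrative):

    from importlib import resources

    def read_resource(package, name):
        # files() exists on Python >= 3.9; fall back to open_binary()
        # on older interpreters.
        if hasattr(resources, 'files'):
            return (resources.files(package) / name).read_bytes()
        with resources.open_binary(package, name) as fp:
            return fp.read()
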
@@ -499,7 +499,7 b' class wirepeer(repository.peer):' | |||
|
499 | 499 | else: |
|
500 | 500 | heads = wireprototypes.encodelist(heads) |
|
501 | 501 | |
|
502 | if util.safehasattr(bundle, 'deltaheader'): |

502 | if hasattr(bundle, 'deltaheader'): | |
|
503 | 503 | # this is a bundle10, do the old style call sequence |
|
504 | 504 | ret, output = self._callpush(b"unbundle", bundle, heads=heads) |
|
505 | 505 | if ret == b"": |
@@ -721,7 +721,7 b' def unbundle(repo, proto, heads):' | |||
|
721 | 721 | r = exchange.unbundle( |
|
722 | 722 | repo, gen, their_heads, b'serve', proto.client() |
|
723 | 723 | ) |
|
724 | if util.safehasattr(r, 'addpart'): |

724 | if hasattr(r, 'addpart'): | |
|
725 | 725 | # The return looks streamable, we are in the bundle2 case |
|
726 | 726 | # and should return a stream. |
|
727 | 727 | return wireprototypes.streamreslegacy(gen=r.getchunks()) |
@@ -12,7 +12,6 b' from mercurial import (' | |||
|
12 | 12 | debugcommands, |
|
13 | 13 | hg, |
|
14 | 14 | ui as uimod, |
|
15 | util, | |
|
16 | 15 | ) |
|
17 | 16 | |
|
18 | 17 | |
@@ -416,7 +415,7 b' def test_gca():' | |||
|
416 | 415 | for i, (dag, tests) in enumerate(dagtests): |
|
417 | 416 | repo = hg.repository(u, b'gca%d' % i, create=1) |
|
418 | 417 | cl = repo.changelog |
|
419 | if not util.safehasattr(cl.index, 'ancestors'): |

418 | if not hasattr(cl.index, 'ancestors'): | |
|
420 | 419 | # C version not available |
|
421 | 420 | return |
|
422 | 421 |
@@ -179,15 +179,13 b' except ImportError as inst:' | |||
|
179 | 179 | 'cannot import name unknownattr' |
|
180 | 180 | ) |
|
181 | 181 | |
|
182 | from mercurial import util | |
|
183 | ||
|
184 | 182 | # Unlike the import statement, __import__() function should not raise |
|
185 | 183 | # ImportError even if fromlist has an unknown item |
|
186 | 184 | # (see Python/import.c:import_module_level() and ensure_fromlist()) |
|
187 | 185 | assert 'ftplib' not in sys.modules |
|
188 | 186 | zipfileimp = __import__('ftplib', globals(), locals(), ['unknownattr']) |
|
189 | 187 | assert f(zipfileimp) == "<module 'ftplib' from '?'>", f(zipfileimp) |
|
190 | assert not util.safehasattr(zipfileimp, 'unknownattr') |

188 | assert not hasattr(zipfileimp, 'unknownattr') | |
|
191 | 189 | |
|
192 | 190 | |
|
193 | 191 | # test deactivation for issue6725 |
@@ -11,7 +11,7 b' without changegroup2 support' | |||
|
11 | 11 | > command = registrar.command(cmdtable) |
|
12 | 12 | > @command('testcg2', norepo=True) |
|
13 | 13 | > def testcg2(ui): |
|
14 | > if not util.safehasattr(changegroup, 'cg2packer'): |

14 | > if not hasattr(changegroup, 'cg2packer'): | |
|
15 | 15 | > sys.exit(80) |
|
16 | 16 | > EOF |
|
17 | 17 | $ cat >> $HGRCPATH << EOF |