py3: use class X: instead of class X(object):...
Gregory Szorc -
r49801:642e31cb default
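
The hunks below all apply the same mechanical cleanup: on Python 3 every class is already a new-style class, so an explicit `object` base adds nothing. A minimal illustrative sketch (the class names here are hypothetical, not taken from the patch) showing that both spellings are equivalent on Python 3:

# Hypothetical example, not from the patch: on Python 3 the explicit
# `object` base is redundant -- every class is new-style.
class WithBase(object):
    pass

class WithoutBase:
    pass

# Both resolve to the same base chain: (cls, object).
assert WithBase.__mro__[1:] == WithoutBase.__mro__[1:] == (object,)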
@@ -758,7 +758,7 @@ def _preparepats():
     preparefilters(filters)


-class norepeatlogger(object):
+class norepeatlogger:
     def __init__(self):
         self._lastseen = None

@@ -20,7 +20,7 @@ args = ap.parse_args()

 if sys.version_info[0] < 3:

-    class py2reprhack(object):
+    class py2reprhack:
         def __repr__(self):
             """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
             return self.__bytes__()
@@ -28,7 +28,7 @@ if sys.version_info[0] < 3:

 else:

-    class py2reprhack(object):
+    class py2reprhack:
         """Not needed on py3."""


@@ -49,7 +49,7 @@ def connectpipe(path=None, extraargs=())
     return server


-class unixconnection(object):
+class unixconnection:
     def __init__(self, sockpath):
         self.sock = sock = socket.socket(socket.AF_UNIX)
         sock.connect(sockpath)
@@ -62,7 +62,7 @@ class unixconnection(object):
         self.sock.close()


-class unixserver(object):
+class unixserver:
     def __init__(self, sockpath, logpath=None, repopath=None):
         self.sockpath = sockpath
         cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
@@ -369,7 +369,7 @@ def getlen(ui):
     return len


-class noop(object):
+class noop:
     """dummy context manager"""

     def __enter__(self):
@@ -413,7 +413,7 @@ def gettimer(ui, opts=None):
     # available since 2.2 (or ae5f92e154d3)
     from mercurial import node

-    class defaultformatter(object):
+    class defaultformatter:
         """Minimized composition of baseformatter and plainformatter"""

         def __init__(self, ui, topic, opts):
@@ -652,7 +652,7 @@ def safeattrsetter(obj, name, ignoremiss

     origvalue = getattr(obj, _sysstr(name))

-    class attrutil(object):
+    class attrutil:
         def set(self, newvalue):
             setattr(obj, _sysstr(name), newvalue)

@@ -2942,7 +2942,7 @@ def perfrevlogwrite(ui, repo, file_=None
     fm.end()


-class _faketr(object):
+class _faketr:
     def add(s, x, y, z=None):
         return None

@@ -79,7 +79,7 @@ def writeerr(data):
 ####################


-class embeddedmatcher(object): # pytype: disable=ignored-metaclass
+class embeddedmatcher: # pytype: disable=ignored-metaclass
     """Base class to detect embedded code fragments in *.t test script"""

     __metaclass__ = abc.ABCMeta
@@ -156,7 +156,7 @@ def embedded(basefile, lines, errors, ma
     :ends: line number (1-origin), at which embedded code ends (exclusive)
     :code: extracted embedded code, which is single-stringified

-    >>> class ambigmatcher(object):
+    >>> class ambigmatcher:
     ... # mock matcher class to examine implementation of
     ... # "ambiguous matching" corner case
     ... def __init__(self, desc, matchfunc):
@@ -112,7 +112,7 @@ class Writer(writers.Writer):
         self.output = visitor.astext()


-class Table(object):
+class Table:
     def __init__(self):
         self._rows = []
         self._options = ['center']
@@ -312,7 +312,7 @@ class Translator(nodes.NodeVisitor):
         pass

     def list_start(self, node):
-        class enum_char(object):
+        class enum_char:
             enum_style = {
                 'bullet': '\\(bu',
                 'emdash': '\\(em',
@@ -54,7 +54,7 @@ def _hgextimport(importfunc, name, globa
     return importfunc(hgextname, globals, *args, **kwargs)


-class _demandmod(object):
+class _demandmod:
     """module demand-loader and proxy

     Specify 1 as 'level' argument at construction, to import module
@@ -54,7 +54,7 @@ class _lazyloaderex(importlib.util.LazyL
         super().exec_module(module)


-class LazyFinder(object):
+class LazyFinder:
     """A wrapper around a ``MetaPathFinder`` that makes loaders lazy.

     ``sys.meta_path`` finders have their ``find_spec()`` called to locate a
@@ -83,7 +83,7 @@ colortable = {
 defaultdict = collections.defaultdict


-class nullui(object):
+class nullui:
     """blank ui object doing nothing"""

     debugflag = False
@@ -97,7 +97,7 @@ class nullui(object):
         return nullfunc


-class emptyfilecontext(object):
+class emptyfilecontext:
     """minimal filecontext representing an empty file"""

     def __init__(self, repo):
@@ -277,7 +277,7 @@ def overlaycontext(memworkingcopy, ctx,
     )


-class filefixupstate(object):
+class filefixupstate:
     """state needed to apply fixups to a single file

     internally, it keeps file contents of several revisions and a linelog.
@@ -655,7 +655,7 @@ class filefixupstate(object):
         )


-class fixupstate(object):
+class fixupstate:
     """state needed to run absorb

     internally, it keeps paths and filefixupstates.
@@ -105,7 +105,7 @@ configitem(b'blackbox', b'date-format',
 _lastlogger = loggingutil.proxylogger()


-class blackboxlogger(object):
+class blackboxlogger:
     def __init__(self, ui, repo):
         self._repo = repo
         self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
@@ -434,7 +434,7 @@ configitem(
 )


-class bzaccess(object):
+class bzaccess:
     '''Base class for access to Bugzilla.'''

     def __init__(self, ui):
@@ -690,7 +690,7 @@ class bzmysql_3_0(bzmysql_2_18):
 # Bugzilla via XMLRPC interface.


-class cookietransportrequest(object):
+class cookietransportrequest:
     """A Transport request method that retains cookies over its lifetime.

     The regular xmlrpclib transports ignore cookies. Which causes
@@ -1095,7 +1095,7 @@ class bzrestapi(bzaccess):
         pass


-class bugzilla(object):
+class bugzilla:
     # supported versions of bugzilla. different versions have
     # different schemas.
     _versions = {
@@ -34,7 +34,7 @@ def _encodeornone(d):
     return d.encode('latin1')


-class _shlexpy3proxy(object):
+class _shlexpy3proxy:
     def __init__(self, l):
         self._l = l

@@ -127,7 +127,7 @@ class NoRepo(Exception):
 SKIPREV = b'SKIP'


-class commit(object):
+class commit:
     def __init__(
         self,
         author,
@@ -157,7 +157,7 @@ class commit(object):
         self.ctx = ctx # for hg to hg conversions


-class converter_source(object):
+class converter_source:
     """Conversion source interface"""

     def __init__(self, ui, repotype, path=None, revs=None):
@@ -307,7 +307,7 @@ class converter_source(object):
         return True


-class converter_sink(object):
+class converter_sink:
     """Conversion sink (target) interface"""

     def __init__(self, ui, repotype, path):
@@ -403,7 +403,7 @@ class converter_sink(object):
         raise NotImplementedError


-class commandline(object):
+class commandline:
     def __init__(self, ui, command):
         self.ui = ui
         self.command = command
@@ -176,7 +176,7 @@ def convertsink(ui, path, type):
     raise error.Abort(_(b'%s: unknown repository type') % path)


-class progresssource(object):
+class progresssource:
     def __init__(self, ui, source, filecount):
         self.ui = ui
         self.source = source
@@ -198,7 +198,7 @@ class progresssource(object):
         self.progress.complete()


-class converter(object):
+class converter:
     def __init__(self, ui, source, dest, revmapfile, opts):

         self.source = source
@@ -26,7 +26,7 @@ from mercurial.utils import (
 )


-class logentry(object):
+class logentry:
     """Class logentry has the following attributes:
     .author - author name as CVS knows it
     .branch - name of branch this revision is on
@@ -577,7 +577,7 @@ def createlog(ui, directory=None, root=b
     return log


-class changeset(object):
+class changeset:
     """Class changeset has the following attributes:
     .id - integer identifying this changeset (list index)
     .author - author name as CVS knows it
@@ -41,7 +41,7 @@ def normalize(path):
     return posixpath.normpath(path)


-class filemapper(object):
+class filemapper:
     """Map and filter filenames when importing.
     A name can be mapped to itself, a new name, or None (omit from new
     repository)."""
@@ -19,7 +19,7 @@ from mercurial import (
 from . import common


-class submodule(object):
+class submodule:
     def __init__(self, path, node, url):
         self.path = path
         self.node = node
@@ -27,7 +27,7 @@ from . import common


 class gnuarch_source(common.converter_source, common.commandline):
-    class gnuarch_rev(object):
+    class gnuarch_rev:
         def __init__(self, rev):
             self.rev = rev
             self.summary = b''
@@ -180,7 +180,7 @@ def optrev(number):
     return optrev


-class changedpath(object):
+class changedpath:
     def __init__(self, p):
         self.copyfrom_path = p.copyfrom_path
         self.copyfrom_rev = p.copyfrom_rev
@@ -248,7 +248,7 @@ def debugsvnlog(ui, **opts):
     get_log_child(ui.fout, *args)


-class logstream(object):
+class logstream:
     """Interruptible revision log iterator."""

     def __init__(self, stdout):
@@ -70,7 +70,7 @@ class NotBranchError(SubversionException
     pass


-class SvnRaTransport(object):
+class SvnRaTransport:
     """
     Open an ra connection to a Subversion repository.
     """
@@ -107,7 +107,7 @@ class SvnRaTransport(object):
         self.ra = ra
         svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))

-    class Reporter(object):
+    class Reporter:
         def __init__(self, reporter_data):
             self._reporter, self._baton = reporter_data

@@ -185,7 +185,7 @@ filters = {
 }


-class eolfile(object):
+class eolfile:
     def __init__(self, ui, root, data):
         self._decode = {
             b'LF': b'to-lf',
@@ -695,7 +695,7 @@ def extdiff(ui, repo, *pats, **opts):
     return dodiff(ui, repo, cmdline, pats, opts)


-class savedcmd(object):
+class savedcmd:
     """use external program to diff repository (or selected files)

     Show differences between revisions for the specified files, using
@@ -159,7 +159,7 @@ def hashdiffopts(diffopts):
 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)


-class annotateopts(object):
+class annotateopts:
     """like mercurial.mdiff.diffopts, but is for annotate

     followrename: follow renames, like "hg annotate -f"
@@ -196,7 +196,7 @@ class annotateopts(object):
 defaultopts = annotateopts()


-class _annotatecontext(object):
+class _annotatecontext:
     """do not use this class directly as it does not use lock to protect
     writes. use "with annotatecontext(...)" instead.
     """
@@ -783,7 +783,7 @@ def _unlinkpaths(paths):
         pass


-class pathhelper(object):
+class pathhelper:
     """helper for getting paths for lockfile, linelog and revmap"""

     def __init__(self, repo, path, opts=defaultopts):
@@ -19,7 +19,7 @@ from mercurial.utils import dateutil

 # imitating mercurial.commands.annotate, not using the vanilla formatter since
 # the data structures are a bit different, and we have some fast paths.
-class defaultformatter(object):
+class defaultformatter:
     """the default formatter that does leftpad and support some common flags"""

     def __init__(self, ui, repo, opts):
@@ -48,7 +48,7 @@ renameflag = 2
 _hshlen = 20


-class revmap(object):
+class revmap:
     """trivial hg bin hash - linelog rev bidirectional map

     also stores a flag (uint8) for each revision, and track renames.
@@ -22,7 +22,7 @@ from . import (
 )


-class _lazyfctx(object):
+class _lazyfctx:
     """delegates to fctx but do not construct fctx when unnecessary"""

     def __init__(self, repo, node, path):
@@ -903,7 +903,7 @@ def fixernames(ui):
     return names


-class Fixer(object):
+class Fixer:
     """Wraps the raw config values for a fixer with methods"""

     def __init__(
@@ -681,7 +681,7 @@ def overridestatus(
     )


-class poststatus(object):
+class poststatus:
     def __init__(self, startclock):
         self._startclock = pycompat.sysbytes(startclock)

@@ -756,7 +756,7 @@ def wrapsymlink(orig, source, link_name)
         pass


-class state_update(object):
+class state_update:
     """This context manager is responsible for dispatching the state-enter
     and state-leave signals to the watchman service. The enter and leave
     methods can be invoked manually (for scenarios where context manager
@@ -301,7 +301,7 @@ class CommandError(WatchmanError):
         )


-class Transport(object):
+class Transport:
     """communication transport to the watchman server"""

     buf = None
@@ -346,7 +346,7 @@ class Transport(object):
            self.buf.append(b)


-class Codec(object):
+class Codec:
     """communication encoding for the watchman server"""

     transport = None
@@ -859,7 +859,7 @@ class JsonCodec(Codec):
         self.transport.write(cmd + b"\n")


-class client(object):
+class client:
     """Handles the communication with the watchman service"""

     sockpath = None
@@ -93,7 +93,7 @@ def _buf_pos(buf, pos):
     return ret


-class _bser_buffer(object):
+class _bser_buffer:
     def __init__(self, version):
         self.bser_version = version
         self.buf = ctypes.create_string_buffer(8192)
@@ -324,7 +324,7 @@ def dumps(obj, version=1, capabilities=0
 # This is a quack-alike with the bserObjectType in bser.c
 # It provides by getattr accessors and getitem for both index
 # and name.
-class _BunserDict(object):
+class _BunserDict:
     __slots__ = ("_keys", "_values")

     def __init__(self, keys, values):
@@ -350,7 +350,7 @@ class _BunserDict(object):
         return len(self._keys)


-class Bunser(object):
+class Bunser:
     def __init__(self, mutable=True, value_encoding=None, value_errors=None):
         self.mutable = mutable
         self.value_encoding = value_encoding
@@ -22,7 +22,7 @@ from mercurial import (
 _versionformat = b">I"


-class state(object):
+class state:
     def __init__(self, repo):
         self._vfs = repo.vfs
         self._ui = repo.ui
@@ -43,7 +43,7 @@ class WatchmanNoRoot(Unavailable):
         super(WatchmanNoRoot, self).__init__(msg)


-class client(object):
+class client:
     def __init__(self, ui, root, timeout=1.0):
         err = None
         if not self._user:
@@ -47,7 +47,7 @@ getversion = gitutil.pygit2_version


 # TODO: extract an interface for this in core
-class gitstore(object): # store.basicstore):
+class gitstore: # store.basicstore):
     def __init__(self, path, vfstype):
         self.vfs = vfstype(path)
         self.opener = self.vfs
@@ -129,7 +129,7 @@ def _makestore(orig, requirements, store
     return orig(requirements, storebasepath, vfstype)


-class gitfilestorage(object):
+class gitfilestorage:
     def file(self, path):
         if path[0:1] == b'/':
             path = path[1:]
@@ -161,7 +161,7 @@ def _setupdothg(ui, path):
 _BMS_PREFIX = 'refs/heads/'


-class gitbmstore(object):
+class gitbmstore:
     def __init__(self, gitrepo):
         self.gitrepo = gitrepo
         self._aclean = True
@@ -66,7 +66,7 @@ if pygit2:


 @interfaceutil.implementer(intdirstate.idirstate)
-class gitdirstate(object):
+class gitdirstate:
     def __init__(self, ui, root, gitrepo):
         self._ui = ui
         self._root = os.path.dirname(root)
@@ -29,7 +29,7 @@ from . import (
 pygit2 = gitutil.get_pygit2()


-class baselog(object): # revlog.revlog):
+class baselog: # revlog.revlog):
     """Common implementations between changelog and manifestlog."""

     def __init__(self, gr, db):
@@ -69,7 +69,7 @@ class baselog(object): # revlog.revlog)
         return t is not None


-class baselogindex(object):
+class baselogindex:
     def __init__(self, log):
         self._log = log

@@ -15,7 +15,7 @@ pygit2 = gitutil.get_pygit2()


 @interfaceutil.implementer(repository.imanifestdict)
-class gittreemanifest(object):
+class gittreemanifest:
     """Expose git trees (and optionally a builder's overlay) as a manifestdict.

     Very similar to mercurial.manifest.treemanifest.
@@ -258,7 +258,7 @@ class gittreemanifest(object):


 @interfaceutil.implementer(repository.imanifestrevisionstored)
-class gittreemanifestctx(object):
+class gittreemanifestctx:
     def __init__(self, repo, gittree):
         self._repo = repo
         self._tree = gittree
@@ -279,7 +279,7 @@ class gittreemanifestctx(object):


 @interfaceutil.implementer(repository.imanifestrevisionwritable)
-class memgittreemanifestctx(object):
+class memgittreemanifestctx:
     def __init__(self, repo, tree):
         self._repo = repo
         self._tree = tree
@@ -122,7 +122,7 @@ def parseoptions(ui, cmdoptions, args):
     return args, opts


-class Command(object):
+class Command:
     def __init__(self, name):
         self.name = name
         self.args = []
@@ -163,7 +163,7 @@ class Command(object):
         return AndCommand(self, other)


-class AndCommand(object):
+class AndCommand:
     def __init__(self, left, right):
         self.left = left
         self.right = right
@@ -64,7 +64,7 @@ help.CATEGORY_ORDER.insert(
 help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)'


-class gpg(object):
+class gpg:
     def __init__(self, path, key=None):
         self.path = path
         self.key = (key and b" --local-user \"%s\"" % key) or b""
@@ -351,7 +351,7 @@ Commands:
     return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])


-class histeditstate(object):
+class histeditstate:
     def __init__(self, repo):
         self.repo = repo
         self.actions = None
@@ -490,7 +490,7 @@ class histeditstate(object):
         return self.repo.vfs.exists(b'histedit-state')


-class histeditaction(object):
+class histeditaction:
     def __init__(self, state, node):
         self.state = state
         self.repo = state.repo
@@ -1142,7 +1142,7 @@ def screen_size():
     return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))


-class histeditrule(object):
+class histeditrule:
     def __init__(self, ui, ctx, pos, action=b'pick'):
         self.ui = ui
         self.ctx = ctx
@@ -1242,7 +1242,7 @@ def _trunc_tail(line, n):
     return line[: n - 2] + b' >'


-class _chistedit_state(object):
+class _chistedit_state:
     def __init__(
         self,
         repo,
@@ -286,7 +286,7 @@ def _tryhoist(ui, remotebookmark):
     return remotebookmark


-class bundlestore(object):
+class bundlestore:
     def __init__(self, repo):
         self._repo = repo
         storetype = self._repo.ui.config(b'infinitepush', b'storetype')
@@ -101,7 +101,7 @@ def _handlelfs(repo, missing):
         return


-class copiedpart(object):
+class copiedpart:
     """a copy of unbundlepart content that can be consumed later"""

     def __init__(self, part):
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.


-class indexapi(object):
+class indexapi:
     """Class that manages access to infinitepush index.

     This class is a context manager and all write operations (like
@@ -25,7 +25,7 @@ class BundleReadException(Exception):
     pass


-class abstractbundlestore(object): # pytype: disable=ignored-metaclass
+class abstractbundlestore: # pytype: disable=ignored-metaclass
     """Defines the interface for bundle stores.

     A bundle store is an entity that stores raw bundle data. It is a simple
@@ -56,7 +56,7 @@ class abstractbundlestore(object): # py
         """


-class filebundlestore(object):
+class filebundlestore:
     """bundle store in filesystem

     meant for storing bundles somewhere on disk and on network filesystems
@@ -282,7 +282,7 @@ class journalentry(
     __str__ = encoding.strmethod(__bytes__)


-class journalstorage(object):
+class journalstorage:
     """Storage for journal entries

     Entries are divided over two files; one with entries that pertain to the
@@ -235,7 +235,7 @@ def _preselect(wstatus, changed):
     return modified, added


-class kwtemplater(object):
+class kwtemplater:
     """
     Sets up keyword templates, corresponding keyword regex, and
     provides keyword substitution functions.
@@ -41,7 +41,7 @@ class StoreError(Exception):
         return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail)


-class basestore(object):
+class basestore:
     def __init__(self, ui, repo, url):
         self.ui = ui
         self.repo = repo
@@ -756,7 +756,7 @@ def updatestandinsbymatch(repo, match):
     return match


-class automatedcommithook(object):
+class automatedcommithook:
     """Stateful hook to update standins at the 1st commit of resuming

     For efficiency, updating standins in the working directory should
@@ -492,7 +492,7 @@ def overridedebugstate(orig, ui, repo, *
     large = opts.pop('large', False)
     if large:

-        class fakerepo(object):
+        class fakerepo:
             dirstate = lfutil.openlfdirstate(ui, repo)

         orig(ui, fakerepo, *pats, **opts)
@@ -108,7 +108,7 @@ class lfsuploadfile(httpconnectionmod.ht
         return None # progress is handled by the worker client


-class local(object):
+class local:
     """Local blobstore for large file contents.

     This blobstore is used both as a cache and as a staging area for large blobs
@@ -306,7 +306,7 @@ class lfsauthhandler(util.urlreq.basehan
         return None


-class _gitlfsremote(object):
+class _gitlfsremote:
     def __init__(self, repo, url):
         ui = repo.ui
         self.ui = ui
@@ -642,7 +642,7 @@ class _gitlfsremote(object):
            getattr(h, "close_all", lambda: None)()


-class _dummyremote(object):
+class _dummyremote:
     """Dummy store storing blobs to temp directory."""

     def __init__(self, repo, url):
@@ -661,7 +661,7 @@ class _dummyremote(object):
                tostore.download(p.oid(), fp, None)


-class _nullremote(object):
+class _nullremote:
     """Null store storing blobs to /dev/null."""

     def __init__(self, repo, url):
@@ -674,7 +674,7 @@ class _nullremote(object):
         pass


-class _promptremote(object):
+class _promptremote:
     """Prompt user to set lfs.url when accessed."""

     def __init__(self, repo, url):
@@ -44,7 +44,7 @@ from mercurial.utils import procutil
 testedwith = b'ships-with-hg-core'


-class processlogger(object):
+class processlogger:
     """Map log events to external commands

     Arguments are passed on as environment variables.
@@ -150,7 +150,7 @@ try:
 except KeyError:
     # note: load is lazy so we could avoid the try-except,
     # but I (marmoute) prefer this explicit code.
-    class dummyui(object):
+    class dummyui:
         def debug(self, msg):
             pass

@@ -183,7 +183,7 @@ def checksubstate(repo, baserev=None):
 normname = util.normpath


-class statusentry(object):
+class statusentry:
     def __init__(self, node, name):
         self.node, self.name = node, name

@@ -293,7 +293,7 @@ def insertplainheader(lines, header, val
     return lines


-class patchheader(object):
+class patchheader:
     def __init__(self, pf, plainmode=False):
         def eatdiff(lines):
             while lines:
@@ -495,7 +495,7 @@ class AbortNoCleanup(error.Abort):
     pass


-class queue(object):
+class queue:
     def __init__(self, ui, baseui, path, patchdir=None):
         self.basepath = path
         try:
@@ -314,7 +314,7 @@ deftemplates = {
 }


-class notifier(object):
+class notifier:
     '''email notification class.'''

     def __init__(self, ui, repo, hooktype):
@@ -668,7 +668,7 @@ def getdiff(basectx, ctx, diffopts):
     return output.getvalue()


-class DiffChangeType(object):
+class DiffChangeType:
     ADD = 1
     CHANGE = 2
     DELETE = 3
@@ -679,7 +679,7 @@ class DiffChangeType(object):
     MULTICOPY = 8


-class DiffFileType(object):
+class DiffFileType:
     TEXT = 1
     IMAGE = 2
     BINARY = 3
@@ -700,7 +700,7 @@ class phabhunk(dict):


 @attr.s
-class phabchange(object):
+class phabchange:
     """Represents a Differential change, owns Differential hunks and owned by a
     Differential diff. Each one represents one file in a diff.
     """
@@ -741,7 +741,7 @@ class phabchange(object):


 @attr.s
-class phabdiff(object):
+class phabdiff:
     """Represents a Differential diff, owns Differential changes. Corresponds
     to a commit.
     """
@@ -159,7 +159,7 @@ def _ctxdesc(ctx):
     )


-class rebaseruntime(object):
+class rebaseruntime:
     """This class is a container for rebase runtime state"""

     def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
@@ -77,7 +77,7 @@ RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9
 BULLET_SECTION = _(b'Other Changes')


-class parsedreleasenotes(object):
+class parsedreleasenotes:
     def __init__(self):
         self.sections = {}

@@ -170,7 +170,7 @@ class parsedreleasenotes(object):
            self.addnontitleditem(section, paragraphs)


-class releasenotessections(object):
+class releasenotessections:
     def __init__(self, ui, repo=None):
         if repo:
             sections = util.sortdict(DEFAULT_SECTIONS)
@@ -63,7 +63,7 @@ else:
     PACKOPENMODE = b'rb'


-class _cachebackedpacks(object):
+class _cachebackedpacks:
     def __init__(self, packs, cachesize):
         self._packs = set(packs)
         self._lrucache = util.lrucachedict(cachesize)
@@ -109,7 +109,7 @@ class _cachebackedpacks(object):
         self._lastpack = None


-class basepackstore(object):
+class basepackstore:
     # Default cache size limit for the pack files.
     DEFAULTCACHESIZE = 100

@@ -267,7 +267,7 @@ class basepackstore(object):
         return newpacks


-class versionmixin(object):
+class versionmixin:
     # Mix-in for classes with multiple supported versions
     VERSION = None
     SUPPORTED_VERSIONS = [2]
@@ -526,7 +526,7 @@ class mutablebasepack(versionmixin):
         self.idxfp.write(struct.pack(b'!BB', self.VERSION, config))


-class indexparams(object):
+class indexparams:
     __slots__ = (
         'fanoutprefix',
         'fanoutstruct',
@@ -19,7 +19,7 @@ from . import (
 )


-class basestore(object):
+class basestore:
     def __init__(self, repo, path, reponame, shared=False):
         """Creates a remotefilelog store object for the given repo name.

@@ -412,7 +412,7 @@ class basestore(object):
         )


-class baseunionstore(object):
+class baseunionstore:
     def __init__(self, *args, **kwargs):
         # If one of the functions that iterates all of the stores is about to
         # throw a KeyError, try this many times with a full refresh between
@@ -15,7 +15,7 @@ from mercurial import (
 _sshv1peer = sshpeer.sshv1peer


-class connectionpool(object):
+class connectionpool:
     def __init__(self, repo):
         self._repo = repo
         self._pool = dict()
@@ -65,7 +65,7 @@ class connectionpool(object):
            del pathpool[:]


-class connection(object):
+class connection:
     def __init__(self, pool, peer):
         self._pool = pool
         self.peer = peer
@@ -17,7 +17,7 @@ from . import (
 )


-class ChainIndicies(object):
+class ChainIndicies:
     """A static class for easy reference to the delta chain indicies."""

     # The filename of this revision delta
@@ -229,7 +229,7 @@ class remotefilelogcontentstore(basestor
         self._threaddata.metacache = (node, meta)


-class remotecontentstore(object):
+class remotecontentstore:
     def __init__(self, ui, fileservice, shared):
         self._fileservice = fileservice
         # type(shared) is usually remotefilelogcontentstore
@@ -274,7 +274,7 @@ class remotecontentstore(object):
         pass


-class manifestrevlogstore(object):
+class manifestrevlogstore:
     def __init__(self, repo):
         self._store = repo.store
         self._svfs = repo.svfs
@@ -81,7 +81,7 @@ def buildtemprevlog(repo, file):
         os.remove(temppath)
     r = filelog.filelog(repo.svfs, b'temprevlog')

-    class faket(object):
+    class faket:
         def add(self, a, b, c):
             pass

@@ -139,7 +139,7 @@ def peersetup(ui, peer):
     peer.__class__ = remotefilepeer


-class cacheconnection(object):
+class cacheconnection:
     """The connection for communicating with the remote cache. Performs
     gets and sets by communicating with an external process that has the
     cache-specific implementation.
@@ -302,7 +302,7 @@ def _getfiles_threaded(
         pipeo.flush()


-class fileserverclient(object):
+class fileserverclient:
     """A client for requesting files from the remote file server."""

     def __init__(self, repo):
@@ -517,7 +517,7 @@ class fileserverclient(object):
         # returns cache misses. This enables tests to run easily
         # and may eventually allow us to be a drop in replacement
         # for the largefiles extension.
-        class simplecache(object):
+        class simplecache:
             def __init__(self):
                 self.missingids = []
                 self.connected = True
@@ -141,7 +141,7 @@ class remotefilelogmetadatastore(basesto
         )


-class remotemetadatastore(object):
+class remotemetadatastore:
     def __init__(self, ui, fileservice, shared):
         self._fileservice = fileservice
         self._shared = shared
@@ -27,7 +27,7 @@ from . import (
 )


-class remotefilelognodemap(object):
+class remotefilelognodemap:
     def __init__(self, filename, store):
         self._filename = filename
         self._store = store
@@ -42,7 +42,7 @@ class remotefilelognodemap(object):
         return node


-class remotefilelog(object):
+class remotefilelog:

     _generaldelta = True
     _flagserrorclass = error.RevlogError
@@ -93,7 +93,7 @@ def onetimesetup(ui):
         b'x_rfl_getfile', b'file node', permission=b'pull'
     )(getfile)

-    class streamstate(object):
+    class streamstate:
         match = None
         shallowremote = False
         noflatmf = False
@@ -498,7 +498,7 @@ def keepset(repo, keyfn, lastkeepkeys=No
     return keepkeys


-class repacker(object):
+class repacker:
     """Class for orchestrating the repack of data and history information into a
     new format.
     """
@@ -819,7 +819,7 @@ class repacker(object):
         return sortednodes


-class repackledger(object):
+class repackledger:
     """Storage for all the bookkeeping that happens during a repack. It contains
     the list of revisions being repacked, what happened to each revision, and
     which source store contained which revision originally (for later cleanup).
@@ -867,7 +867,7 @@ class repackledger(object):
         self.created.add(value)


-class repackentry(object):
+class repackentry:
     """Simple class representing a single revision entry in the repackledger."""

     __slots__ = (
@@ -176,7 +176,7 @@ class lazyremotenamedict(mutablemapping)
     items = iteritems


-class remotenames(object):
+class remotenames:
     """
     This class encapsulates all the remotenames state. It also contains
     methods to access that state in convenient ways. Remotenames are lazy
@@ -67,7 +67,7 @@ testedwith = b'ships-with-hg-core'
 _partre = re.compile(br'{(\d+)\}')


-class ShortRepository(object):
+class ShortRepository:
     def __init__(self, url, scheme, templater):
         self.scheme = scheme
         self.templater = templater
@@ -264,7 +264,7 @@ class SQLiteStoreError(error.StorageErro


 @attr.s
-class revisionentry(object):
+class revisionentry:
     rid = attr.ib()
     rev = attr.ib()
     node = attr.ib()
@@ -278,7 +278,7 @@ class revisionentry(object):

 @interfaceutil.implementer(repository.irevisiondelta)
 @attr.s(slots=True)
-class sqliterevisiondelta(object):
+class sqliterevisiondelta:
     node = attr.ib()
     p1node = attr.ib()
     p2node = attr.ib()
@@ -294,14 +294,14 @@ class sqliterevisiondelta(object):

 @interfaceutil.implementer(repository.iverifyproblem)
 @attr.s(frozen=True)
-class sqliteproblem(object):
+class sqliteproblem:
     warning = attr.ib(default=None)
     error = attr.ib(default=None)
     node = attr.ib(default=None)


 @interfaceutil.implementer(repository.ifilestorage)
-class sqlitefilestore(object):
+class sqlitefilestore:
     """Implements storage for an individual tracked path."""

     def __init__(self, db, path, compression):
@@ -1249,7 +1249,7 @@ def newreporequirements(orig, ui, create


 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
-class sqlitefilestorage(object):
+class sqlitefilestorage:
     """Repository file storage backed by SQLite."""

     def file(self, path):
@@ -75,13 +75,13 @@ configitem(
 )


-class transplantentry(object):
+class transplantentry:
     def __init__(self, lnode, rnode):
         self.lnode = lnode
         self.rnode = rnode


-class transplants(object):
+class transplants:
     def __init__(self, path=None, transplantfile=None, opener=None):
         self.path = path
         self.transplantfile = transplantfile
@@ -128,7 +128,7 @@ class transplants(object):
         self.dirty = True


-class transplanter(object):
+class transplanter:
     def __init__(self, ui, repo, opts):
         self.ui = ui
         self.repo = repo
@@ -231,7 +231,7 @@ class BadDomainNameCircular(BadDomainNam
 # implementation classes


-class DNSEntry(object):
+class DNSEntry:
     """A DNS entry"""

     def __init__(self, name, type, clazz):
@@ -506,7 +506,7 @@ class DNSService(DNSRecord):
         return self.toString(b"%s:%s" % (self.server, self.port))


-class DNSIncoming(object):
+class DNSIncoming:
     """Object representation of an incoming DNS packet"""

     def __init__(self, data):
@@ -702,7 +702,7 @@ class DNSIncoming(object):
         return result


-class DNSOutgoing(object):
+class DNSOutgoing:
     """Object representation of an outgoing packet"""

     def __init__(self, flags, multicast=1):
@@ -864,7 +864,7 @@ class DNSOutgoing(object):
         return b''.join(self.data)


-class DNSCache(object):
+class DNSCache:
     """A cache of DNS entries"""

     def __init__(self):
@@ -982,7 +982,7 @@ class Engine(threading.Thread):
         self.condition.release()


-class Listener(object):
+class Listener:
     """A Listener is used by this module to listen on the multicast
     group to which DNS messages are sent, allowing the implementation
     to cache information as it arrives.
@@ -1127,7 +1127,7 @@ class ServiceBrowser(threading.Thread):
            event(self.zeroconf)


-class ServiceInfo(object):
+class ServiceInfo:
     """Service information"""

     def __init__(
@@ -1386,7 +1386,7 @@ class ServiceInfo(object):
         return result


-class Zeroconf(object):
+class Zeroconf:
     """Implementation of Zeroconf Multicast DNS Service Discovery

     Supports registration, unregistration, queries and browsing.
@@ -158,7 +158,7 @@ def zc_create_server(create_server, ui,
     # listen


-class listener(object):
+class listener:
     def __init__(self):
         self.found = {}

@@ -42,7 +42,7 @@ try:
 except ImportError:
     # replacement of io.open() for python < 2.6
     # we use codecs instead
-    class io(object):
+    class io:
         @staticmethod
         def open(fpath, mode='r', encoding=None):
             return codecs.open(fpath, mode, encoding)
@@ -816,7 +816,7 @@ class MOFile(_BaseFile):
 # class _BaseEntry {{{


-class _BaseEntry(object):
+class _BaseEntry:
     """
     Base class for :class:`~polib.POEntry` and :class:`~polib.MOEntry` classes.
     This class should **not** be instanciated directly.
@@ -1227,7 +1227,7 @@ class MOEntry(_BaseEntry):
 # class _POFileParser {{{


-class _POFileParser(object):
+class _POFileParser:
     """
     A finite state machine to parse efficiently and correctly po
     file format.
@@ -1706,7 +1706,7 @@ class _POFileParser(object):
 # class _MOFileParser {{{


-class _MOFileParser(object):
+class _MOFileParser:
     """
     A class to parse binary mo files.
     """
@@ -146,7 +146,7 @@ def ancestors(pfunc, *orignodes):
     return deepest(gca)


-class incrementalmissingancestors(object):
+class incrementalmissingancestors:
     """persistent state used to calculate missing ancestors incrementally

     Although similar in spirit to lazyancestors below, this is a separate class
@@ -316,7 +316,7 @@ def _lazyancestorsiter(parentrevs, initr
            see(p2)


-class lazyancestors(object):
+class lazyancestors:
     def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
         """Create a new object generating ancestors for the given revs. Does
         not generate revs lower than stoprev.
@@ -132,7 +132,7 def buildmetadata(ctx):
132 return out.getvalue()
132 return out.getvalue()
133
133
134
134
135 class tarit(object):
135 class tarit:
136 """write archive to tar file or stream. can write uncompressed,
136 """write archive to tar file or stream. can write uncompressed,
137 or compress with gzip or bzip2."""
137 or compress with gzip or bzip2."""
138
138
@@ -193,7 +193,7 class tarit(object):
193 self.fileobj.close()
193 self.fileobj.close()
194
194
195
195
196 class zipit(object):
196 class zipit:
197 """write archive to zip file or stream. can write uncompressed,
197 """write archive to zip file or stream. can write uncompressed,
198 or compressed with deflate."""
198 or compressed with deflate."""
199
199
@@ -240,7 +240,7 class zipit(object):
240 self.z.close()
240 self.z.close()
241
241
242
242
243 class fileit(object):
243 class fileit:
244 '''write archive as files in directory.'''
244 '''write archive as files in directory.'''
245
245
246 def __init__(self, name, mtime):
246 def __init__(self, name, mtime):
@@ -58,7 +58,7 def _getbkfile(repo):
58 return fp
58 return fp
59
59
60
60
61 class bmstore(object):
61 class bmstore:
62 r"""Storage for bookmarks.
62 r"""Storage for bookmarks.
63
63
64 This object should do all bookmark-related reads and writes, so
64 This object should do all bookmark-related reads and writes, so
@@ -62,7 +62,7 pack_into = struct.pack_into
62 unpack_from = struct.unpack_from
62 unpack_from = struct.unpack_from
63
63
64
64
65 class BranchMapCache(object):
65 class BranchMapCache:
66 """mapping of filtered views of repo with their branchcache"""
66 """mapping of filtered views of repo with their branchcache"""
67
67
68 def __init__(self):
68 def __init__(self):
@@ -169,7 +169,7 def _branchcachedesc(repo):
169 return b'branch cache'
169 return b'branch cache'
170
170
171
171
172 class branchcache(object):
172 class branchcache:
173 """A dict like object that hold branches heads cache.
173 """A dict like object that hold branches heads cache.
174
174
175 This cache is used to avoid costly computations to determine all the
175 This cache is used to avoid costly computations to determine all the
@@ -631,7 +631,7 class remotebranchcache(branchcache):
631 _rbccloseflag = 0x80000000
631 _rbccloseflag = 0x80000000
632
632
633
633
634 class revbranchcache(object):
634 class revbranchcache:
635 """Persistent cache, mapping from revision number to branch name and close.
635 """Persistent cache, mapping from revision number to branch name and close.
636 This is a low level cache, independent of filtering.
636 This is a low level cache, independent of filtering.
637
637
@@ -251,7 +251,7 def parthandler(parttype, params=()):
251 return _decorator
251 return _decorator
252
252
253
253
254 class unbundlerecords(object):
254 class unbundlerecords:
255 """keep record of what happens during and unbundle
255 """keep record of what happens during and unbundle
256
256
257 New records are added using `records.add('cat', obj)`. Where 'cat' is a
257 New records are added using `records.add('cat', obj)`. Where 'cat' is a
@@ -299,7 +299,7 class unbundlerecords(object):
299 __bool__ = __nonzero__
299 __bool__ = __nonzero__
300
300
301
301
302 class bundleoperation(object):
302 class bundleoperation:
303 """an object that represents a single bundling process
303 """an object that represents a single bundling process
304
304
305 Its purpose is to carry unbundle-related objects and states.
305 Its purpose is to carry unbundle-related objects and states.
@@ -379,7 +379,7 def applybundle(repo, unbundler, tr, sou
379 return op
379 return op
380
380
381
381
382 class partiterator(object):
382 class partiterator:
383 def __init__(self, repo, op, unbundler):
383 def __init__(self, repo, op, unbundler):
384 self.repo = repo
384 self.repo = repo
385 self.op = op
385 self.op = op
@@ -626,7 +626,7 bundletypes = {
626 bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
626 bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
627
627
628
628
629 class bundle20(object):
629 class bundle20:
630 """represent an outgoing bundle2 container
630 """represent an outgoing bundle2 container
631
631
632 Use the `addparam` method to add stream level parameter. and `newpart` to
632 Use the `addparam` method to add stream level parameter. and `newpart` to
@@ -750,7 +750,7 class bundle20(object):
750 return salvaged
750 return salvaged
751
751
752
752
753 class unpackermixin(object):
753 class unpackermixin:
754 """A mixin to extract bytes and struct data from a stream"""
754 """A mixin to extract bytes and struct data from a stream"""
755
755
756 def __init__(self, fp):
756 def __init__(self, fp):
@@ -983,7 +983,7 def processcompression(unbundler, param,
983 unbundler._compressed = True
983 unbundler._compressed = True
984
984
985
985
986 class bundlepart(object):
986 class bundlepart:
987 """A bundle2 part contains application level payload
987 """A bundle2 part contains application level payload
988
988
989 The part `type` is used to route the part to the application level
989 The part `type` is used to route the part to the application level
@@ -1273,7 +1273,7 class interrupthandler(unpackermixin):
1273 )
1273 )
1274
1274
1275
1275
1276 class interruptoperation(object):
1276 class interruptoperation:
1277 """A limited operation to be use by part handler during interruption
1277 """A limited operation to be use by part handler during interruption
1278
1278
1279 It only have access to an ui object.
1279 It only have access to an ui object.
@@ -21,7 +21,7 CB_MANIFEST_FILE = b'clonebundles.manife
21
21
22
22
23 @attr.s
23 @attr.s
24 class bundlespec(object):
24 class bundlespec:
25 compression = attr.ib()
25 compression = attr.ib()
26 wirecompression = attr.ib()
26 wirecompression = attr.ib()
27 version = attr.ib()
27 version = attr.ib()
@@ -343,7 +343,7 def filterclonebundleentries(repo, entri
343 return newentries
343 return newentries
344
344
345
345
346 class clonebundleentry(object):
346 class clonebundleentry:
347 """Represents an item in a clone bundles manifest.
347 """Represents an item in a clone bundles manifest.
348
348
349 This rich class is needed to support sorting since sorted() in Python 3
349 This rich class is needed to support sorting since sorted() in Python 3
@@ -270,7 +270,7 def _getfilestarts(cgunpacker):
270 return filespos
270 return filespos
271
271
272
272
273 class bundlerepository(object):
273 class bundlerepository:
274 """A repository instance that is a union of a local repo and a bundle.
274 """A repository instance that is a union of a local repo and a bundle.
275
275
276 Instances represent a read-only repository composed of a local repository
276 Instances represent a read-only repository composed of a local repository
@@ -550,7 +550,7 def makebundlerepository(ui, repopath, b
550 return repo
550 return repo
551
551
552
552
553 class bundletransactionmanager(object):
553 class bundletransactionmanager:
554 def transaction(self):
554 def transaction(self):
555 return None
555 return None
556
556
@@ -33,7 +33,7 if pycompat.isdarwin:
33 attrkinds[lib.VFIFO] = statmod.S_IFIFO
33 attrkinds[lib.VFIFO] = statmod.S_IFIFO
34 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
34 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
35
35
36 class stat_res(object):
36 class stat_res:
37 def __init__(self, st_mode, st_mtime, st_size):
37 def __init__(self, st_mode, st_mtime, st_size):
38 self.st_mode = st_mode
38 self.st_mode = st_mode
39 self.st_mtime = st_mtime
39 self.st_mtime = st_mtime
@@ -105,7 +105,7 def writechunks(ui, chunks, filename, vf
105 os.unlink(cleanup)
105 os.unlink(cleanup)
106
106
107
107
108 class cg1unpacker(object):
108 class cg1unpacker:
109 """Unpacker for cg1 changegroup streams.
109 """Unpacker for cg1 changegroup streams.
110
110
111 A changegroup unpacker handles the framing of the revision data in
111 A changegroup unpacker handles the framing of the revision data in
@@ -691,7 +691,7 class cg4unpacker(cg3unpacker):
691 )
691 )
692
692
693
693
694 class headerlessfixup(object):
694 class headerlessfixup:
695 def __init__(self, fh, h):
695 def __init__(self, fh, h):
696 self._h = h
696 self._h = h
697 self._fh = fh
697 self._fh = fh
@@ -1003,7 +1003,7 def deltagroup(
1003 progress.complete()
1003 progress.complete()
1004
1004
1005
1005
1006 class cgpacker(object):
1006 class cgpacker:
1007 def __init__(
1007 def __init__(
1008 self,
1008 self,
1009 repo,
1009 repo,
@@ -91,7 +91,7 def stripdesc(desc):
91 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
91 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
92
92
93
93
94 class appender(object):
94 class appender:
95 """the changelog index must be updated last on disk, so we use this class
95 """the changelog index must be updated last on disk, so we use this class
96 to delay writes to it"""
96 to delay writes to it"""
97
97
@@ -161,7 +161,7 class appender(object):
161 return self.fp.__exit__(*args)
161 return self.fp.__exit__(*args)
162
162
163
163
164 class _divertopener(object):
164 class _divertopener:
165 def __init__(self, opener, target):
165 def __init__(self, opener, target):
166 self._opener = opener
166 self._opener = opener
167 self._target = target
167 self._target = target
@@ -188,7 +188,7 def _delayopener(opener, target, buf):
188
188
189
189
190 @attr.s
190 @attr.s
191 class _changelogrevision(object):
191 class _changelogrevision:
192 # Extensions might modify _defaultextra, so let the constructor below pass
192 # Extensions might modify _defaultextra, so let the constructor below pass
193 # it in
193 # it in
194 extra = attr.ib()
194 extra = attr.ib()
@@ -204,7 +204,7 class _changelogrevision(object):
204 branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
204 branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
205
205
206
206
207 class changelogrevision(object):
207 class changelogrevision:
208 """Holds results of a parsed changelog revision.
208 """Holds results of a parsed changelog revision.
209
209
210 Changelog revisions consist of multiple pieces of data, including
210 Changelog revisions consist of multiple pieces of data, including
@@ -196,7 +196,7 def _mtimehash(paths):
196 return _hashlist(pycompat.maplist(trystat, paths))[:12]
196 return _hashlist(pycompat.maplist(trystat, paths))[:12]
197
197
198
198
199 class hashstate(object):
199 class hashstate:
200 """a structure storing confighash, mtimehash, paths used for mtimehash"""
200 """a structure storing confighash, mtimehash, paths used for mtimehash"""
201
201
202 def __init__(self, confighash, mtimehash, mtimepaths):
202 def __init__(self, confighash, mtimehash, mtimepaths):
@@ -292,7 +292,7 def _loadnewui(srcui, args, cdebug):
292 return (newui, newlui)
292 return (newui, newlui)
293
293
294
294
295 class channeledsystem(object):
295 class channeledsystem:
296 """Propagate ui.system() request in the following format:
296 """Propagate ui.system() request in the following format:
297
297
298 payload length (unsigned int),
298 payload length (unsigned int),
@@ -623,7 +623,7 def _hashaddress(address, hashstr):
623 return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
623 return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
624
624
625
625
626 class chgunixservicehandler(object):
626 class chgunixservicehandler:
627 """Set of operations for chg services"""
627 """Set of operations for chg services"""
628
628
629 pollinterval = 1 # [sec]
629 pollinterval = 1 # [sec]
@@ -666,7 +666,7 def dorecord(
666 return commit(ui, repo, recordinwlock, pats, opts)
666 return commit(ui, repo, recordinwlock, pats, opts)
667
667
668
668
669 class dirnode(object):
669 class dirnode:
670 """
670 """
671 Represent a directory in user working copy with information required for
671 Represent a directory in user working copy with information required for
672 the purpose of tersing its status.
672 the purpose of tersing its status.
@@ -832,7 +832,7 def _commentlines(raw):
832
832
833
833
834 @attr.s(frozen=True)
834 @attr.s(frozen=True)
835 class morestatus(object):
835 class morestatus:
836 reporoot = attr.ib()
836 reporoot = attr.ib()
837 unfinishedop = attr.ib()
837 unfinishedop = attr.ib()
838 unfinishedmsg = attr.ib()
838 unfinishedmsg = attr.ib()
@@ -1343,7 +1343,7 def isstdiofilename(pat):
1343 return not pat or pat == b'-'
1343 return not pat or pat == b'-'
1344
1344
1345
1345
1346 class _unclosablefile(object):
1346 class _unclosablefile:
1347 def __init__(self, fp):
1347 def __init__(self, fp):
1348 self._fp = fp
1348 self._fp = fp
1349
1349
@@ -39,7 +39,7 from .utils import (
39 )
39 )
40
40
41
41
42 class channeledoutput(object):
42 class channeledoutput:
43 """
43 """
44 Write data to out in the following format:
44 Write data to out in the following format:
45
45
@@ -68,7 +68,7 class channeledoutput(object):
68 return getattr(self.out, attr)
68 return getattr(self.out, attr)
69
69
70
70
71 class channeledmessage(object):
71 class channeledmessage:
72 """
72 """
73 Write encoded message and metadata to out in the following format:
73 Write encoded message and metadata to out in the following format:
74
74
@@ -97,7 +97,7 class channeledmessage(object):
97 return getattr(self._cout, attr)
97 return getattr(self._cout, attr)
98
98
99
99
100 class channeledinput(object):
100 class channeledinput:
101 """
101 """
102 Read data from in_.
102 Read data from in_.
103
103
@@ -200,7 +200,7 def _selectmessageencoder(ui):
200 )
200 )
201
201
202
202
203 class server(object):
203 class server:
204 """
204 """
205 Listens for commands on fin, runs them and writes the output on a channel
205 Listens for commands on fin, runs them and writes the output on a channel
206 based stream to fout.
206 based stream to fout.
@@ -450,7 +450,7 def setuplogging(ui, repo=None, fp=None)
450 u.setlogger(b'cmdserver', logger)
450 u.setlogger(b'cmdserver', logger)
451
451
452
452
453 class pipeservice(object):
453 class pipeservice:
454 def __init__(self, ui, repo, opts):
454 def __init__(self, ui, repo, opts):
455 self.ui = ui
455 self.ui = ui
456 self.repo = repo
456 self.repo = repo
@@ -525,7 +525,7 def _serverequest(ui, repo, conn, create
525 raise
525 raise
526
526
527
527
528 class unixservicehandler(object):
528 class unixservicehandler:
529 """Set of pluggable operations for unix-mode services
529 """Set of pluggable operations for unix-mode services
530
530
531 Almost all methods except for createcmdserver() are called in the main
531 Almost all methods except for createcmdserver() are called in the main
@@ -559,7 +559,7 class unixservicehandler(object):
559 return server(self.ui, repo, fin, fout, prereposetups)
559 return server(self.ui, repo, fin, fout, prereposetups)
560
560
561
561
562 class unixforkingservice(object):
562 class unixforkingservice:
563 """
563 """
564 Listens on unix domain socket and forks server per connection
564 Listens on unix domain socket and forks server per connection
565 """
565 """
@@ -18,7 +18,7 from . import (
18 )
18 )
19
19
20
20
21 class config(object):
21 class config:
22 def __init__(self, data=None):
22 def __init__(self, data=None):
23 self._current_source_level = 0
23 self._current_source_level = 0
24 self._data = {}
24 self._data = {}
@@ -29,7 +29,7 def loadconfigtable(ui, extname, configt
29 knownitems.update(items)
29 knownitems.update(items)
30
30
31
31
32 class configitem(object):
32 class configitem:
33 """represent a known config item
33 """represent a known config item
34
34
35 :section: the official config section where to find this item,
35 :section: the official config section where to find this item,
@@ -51,7 +51,7 from .dirstateutils import (
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
53
53
54 class basectx(object):
54 class basectx:
55 """A basectx object represents the common logic for its children:
55 """A basectx object represents the common logic for its children:
56 changectx: read-only context that is already present in the repo,
56 changectx: read-only context that is already present in the repo,
57 workingctx: a context that represents the working directory and can
57 workingctx: a context that represents the working directory and can
@@ -796,7 +796,7 class changectx(basectx):
796 return self.walk(match)
796 return self.walk(match)
797
797
798
798
799 class basefilectx(object):
799 class basefilectx:
800 """A filecontext object represents the common logic for its children:
800 """A filecontext object represents the common logic for its children:
801 filectx: read-only access to a filerevision that is already present
801 filectx: read-only access to a filerevision that is already present
802 in the repo,
802 in the repo,
@@ -3104,7 +3104,7 class metadataonlyctx(committablectx):
3104 return scmutil.status(modified, added, removed, [], [], [], [])
3104 return scmutil.status(modified, added, removed, [], [], [], [])
3105
3105
3106
3106
3107 class arbitraryfilectx(object):
3107 class arbitraryfilectx:
3108 """Allows you to use filectx-like functions on a file in an arbitrary
3108 """Allows you to use filectx-like functions on a file in an arbitrary
3109 location on disk, possibly not in the working directory.
3109 location on disk, possibly not in the working directory.
3110 """
3110 """
@@ -887,7 +887,7 def _checksinglesidecopies(
887 copy[dst] = src
887 copy[dst] = src
888
888
889
889
890 class branch_copies(object):
890 class branch_copies:
891 """Information about copies made on one side of a merge/graft.
891 """Information about copies made on one side of a merge/graft.
892
892
893 "copy" is a mapping from destination name -> source name,
893 "copy" is a mapping from destination name -> source name,
@@ -82,7 +82,7 def checkcurses(ui):
82 return curses and ui.interface(b"chunkselector") == b"curses"
82 return curses and ui.interface(b"chunkselector") == b"curses"
83
83
84
84
85 class patchnode(object):
85 class patchnode:
86 """abstract class for patch graph nodes
86 """abstract class for patch graph nodes
87 (i.e. patchroot, header, hunk, hunkline)
87 (i.e. patchroot, header, hunk, hunkline)
88 """
88 """
@@ -601,7 +601,7 def testchunkselector(testfn, ui, header
601 """
601 """
602 chunkselector = curseschunkselector(headerlist, ui, operation)
602 chunkselector = curseschunkselector(headerlist, ui, operation)
603
603
604 class dummystdscr(object):
604 class dummystdscr:
605 def clear(self):
605 def clear(self):
606 pass
606 pass
607
607
@@ -628,7 +628,7 def testchunkselector(testfn, ui, header
628 }
628 }
629
629
630
630
631 class curseschunkselector(object):
631 class curseschunkselector:
632 def __init__(self, headerlist, ui, operation=None):
632 def __init__(self, headerlist, ui, operation=None):
633 # put the headers into a patch object
633 # put the headers into a patch object
634 self.headerlist = patch(headerlist)
634 self.headerlist = patch(headerlist)
@@ -271,7 +271,7 def descendantrevs(revs, revsfn, parentr
271 break
271 break
272
272
273
273
274 class subsetparentswalker(object):
274 class subsetparentswalker:
275 r"""Scan adjacent ancestors in the graph given by the subset
275 r"""Scan adjacent ancestors in the graph given by the subset
276
276
277 This computes parent-child relations in the sub graph filtered by
277 This computes parent-child relations in the sub graph filtered by
@@ -647,7 +647,7 def blockdescendants(fctx, fromline, tol
647
647
648
648
649 @attr.s(slots=True, frozen=True)
649 @attr.s(slots=True, frozen=True)
650 class annotateline(object):
650 class annotateline:
651 fctx = attr.ib()
651 fctx = attr.ib()
652 lineno = attr.ib()
652 lineno = attr.ib()
653 # Whether this annotation was the result of a skip-annotate.
653 # Whether this annotation was the result of a skip-annotate.
@@ -656,7 +656,7 class annotateline(object):
656
656
657
657
658 @attr.s(slots=True, frozen=True)
658 @attr.s(slots=True, frozen=True)
659 class _annotatedfile(object):
659 class _annotatedfile:
660 # list indexed by lineno - 1
660 # list indexed by lineno - 1
661 fctxs = attr.ib()
661 fctxs = attr.ib()
662 linenos = attr.ib()
662 linenos = attr.ib()
@@ -90,7 +90,7 def requires_no_parents_change(func):
90
90
91
91
92 @interfaceutil.implementer(intdirstate.idirstate)
92 @interfaceutil.implementer(intdirstate.idirstate)
93 class dirstate(object):
93 class dirstate:
94 def __init__(
94 def __init__(
95 self,
95 self,
96 opener,
96 opener,
@@ -34,7 +34,7 else:
34 rangemask = 0x7FFFFFFF
34 rangemask = 0x7FFFFFFF
35
35
36
36
37 class _dirstatemapcommon(object):
37 class _dirstatemapcommon:
38 """
38 """
39 Methods that are identical for both implementations of the dirstatemap
39 Methods that are identical for both implementations of the dirstatemap
40 class, with and without Rust extensions enabled.
40 class, with and without Rust extensions enabled.
@@ -28,7 +28,7 HEADER = struct.Struct(
28 )
28 )
29
29
30
30
31 class DirstateDocket(object):
31 class DirstateDocket:
32 data_filename_pattern = b'dirstate.%s'
32 data_filename_pattern = b'dirstate.%s'
33
33
34 def __init__(self, parents, data_size, tree_metadata, uuid):
34 def __init__(self, parents, data_size, tree_metadata, uuid):
@@ -125,7 +125,7 def slice_with_len(data, start, len):
125
125
126
126
127 @attr.s
127 @attr.s
128 class Node(object):
128 class Node:
129 path = attr.ib()
129 path = attr.ib()
130 entry = attr.ib()
130 entry = attr.ib()
131 parent = attr.ib(default=None)
131 parent = attr.ib(default=None)
@@ -73,7 +73,7 def findcommonincoming(repo, remote, hea
73 return (list(common), anyinc, heads or list(srvheads))
73 return (list(common), anyinc, heads or list(srvheads))
74
74
75
75
76 class outgoing(object):
76 class outgoing:
77 """Represents the result of a findcommonoutgoing() call.
77 """Represents the result of a findcommonoutgoing() call.
78
78
79 Members:
79 Members:
@@ -53,7 +53,7 from .utils import (
53 )
53 )
54
54
55
55
56 class request(object):
56 class request:
57 def __init__(
57 def __init__(
58 self,
58 self,
59 args,
59 args,
@@ -557,7 +557,7 def aliasinterpolate(name, args, cmd):
557 return r.sub(lambda x: replacemap[x.group()], cmd)
557 return r.sub(lambda x: replacemap[x.group()], cmd)
558
558
559
559
560 class cmdalias(object):
560 class cmdalias:
561 def __init__(self, ui, name, definition, cmdtable, source):
561 def __init__(self, ui, name, definition, cmdtable, source):
562 self.name = self.cmd = name
562 self.name = self.cmd = name
563 self.cmdname = b''
563 self.cmdname = b''
@@ -740,7 +740,7 class cmdalias(object):
740 raise
740 raise
741
741
742
742
743 class lazyaliasentry(object):
743 class lazyaliasentry:
744 """like a typical command entry (func, opts, help), but is lazy"""
744 """like a typical command entry (func, opts, help), but is lazy"""
745
745
746 def __init__(self, ui, name, definition, cmdtable, source):
746 def __init__(self, ui, name, definition, cmdtable, source):
@@ -510,7 +510,7 def trim(s, width, ellipsis=b'', leftsid
510 return u + ellipsis
510 return u + ellipsis
511
511
512
512
513 class normcasespecs(object):
513 class normcasespecs:
514 """what a platform's normcase does to ASCII strings
514 """what a platform's normcase does to ASCII strings
515
515
516 This is specified per platform, and should be consistent with what normcase
516 This is specified per platform, and should be consistent with what normcase
@@ -39,7 +39,7 def _tobytes(exc):
39 return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
39 return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
40
40
41
41
42 class Hint(object):
42 class Hint:
43 """Mix-in to provide a hint of an error
43 """Mix-in to provide a hint of an error
44
44
45 This should come first in the inheritance list to consume a hint and
45 This should come first in the inheritance list to consume a hint and
@@ -222,7 +222,7 def _forcebundle1(op):
222 return forcebundle1 or not op.remote.capable(b'bundle2')
222 return forcebundle1 or not op.remote.capable(b'bundle2')
223
223
224
224
225 class pushoperation(object):
225 class pushoperation:
226 """A object that represent a single push operation
226 """A object that represent a single push operation
227
227
228 Its purpose is to carry push related state and very common operations.
228 Its purpose is to carry push related state and very common operations.
@@ -1371,7 +1371,7 def _pushbookmark(pushop):
1371 pushop.bkresult = 1
1371 pushop.bkresult = 1
1372
1372
1373
1373
1374 class pulloperation(object):
1374 class pulloperation:
1375 """A object that represent a single pull operation
1375 """A object that represent a single pull operation
1376
1376
1377 It purpose is to carry pull related state and very common operation.
1377 It purpose is to carry pull related state and very common operation.
@@ -621,7 +621,7 def wrapfilecache(cls, propname, wrapper
621 raise AttributeError("type '%s' has no property '%s'" % (cls, propname))
621 raise AttributeError("type '%s' has no property '%s'" % (cls, propname))
622
622
623
623
624 class wrappedfunction(object):
624 class wrappedfunction:
625 '''context manager for temporarily wrapping a function'''
625 '''context manager for temporarily wrapping a function'''
626
626
627 def __init__(self, container, funcname, wrapper):
627 def __init__(self, container, funcname, wrapper):
@@ -20,7 +20,7 from . import (
20 from hgdemandimport import tracing
20 from hgdemandimport import tracing
21
21
22
22
23 class exthelper(object):
23 class exthelper:
24 """Helper for modular extension setup
24 """Helper for modular extension setup
25
25
26 A single helper should be instantiated for each module of an
26 A single helper should be instantiated for each module of an
@@ -204,7 +204,7 def earlygetopt(args, shortlist, namelis
204 return parsedopts, parsedargs
204 return parsedopts, parsedargs
205
205
206
206
207 class customopt(object): # pytype: disable=ignored-metaclass
207 class customopt: # pytype: disable=ignored-metaclass
208 """Manage defaults and mutations for any type of opt."""
208 """Manage defaults and mutations for any type of opt."""
209
209
210 __metaclass__ = abc.ABCMeta
210 __metaclass__ = abc.ABCMeta
@@ -24,7 +24,7 from .revlogutils import (
24
24
25
25
26 @interfaceutil.implementer(repository.ifilestorage)
26 @interfaceutil.implementer(repository.ifilestorage)
27 class filelog(object):
27 class filelog:
28 def __init__(self, opener, path):
28 def __init__(self, opener, path):
29 self._revlog = revlog.revlog(
29 self._revlog = revlog.revlog(
30 opener,
30 opener,
@@ -84,7 +84,7 fullmerge = internaltool.fullmerge # bo
84 )
84 )
85
85
86
86
87 class absentfilectx(object):
87 class absentfilectx:
88 """Represents a file that's ostensibly in a context but is actually not
88 """Represents a file that's ostensibly in a context but is actually not
89 present in it.
89 present in it.
90
90
@@ -503,7 +503,7 methods = {
503 }
503 }
504
504
505
505
506 class matchctx(object):
506 class matchctx:
507 def __init__(self, basectx, ctx, cwd, badfn=None):
507 def __init__(self, basectx, ctx, cwd, badfn=None):
508 self._basectx = basectx
508 self._basectx = basectx
509 self.ctx = ctx
509 self.ctx = ctx
@@ -144,7 +144,7 def isprintable(obj):
144 return isinstance(obj, (type(None), bool, int, int, float, bytes))
144 return isinstance(obj, (type(None), bool, int, int, float, bytes))
145
145
146
146
147 class _nullconverter(object):
147 class _nullconverter:
148 '''convert non-primitive data types to be processed by formatter'''
148 '''convert non-primitive data types to be processed by formatter'''
149
149
150 # set to True if context object should be stored as item
150 # set to True if context object should be stored as item
@@ -175,7 +175,7 class _nullconverter(object):
175 return list(data)
175 return list(data)
176
176
177
177
178 class baseformatter(object):
178 class baseformatter:
179
179
180 # set to True if the formater output a strict format that does not support
180 # set to True if the formater output a strict format that does not support
181 # arbitrary output in the stream.
181 # arbitrary output in the stream.
@@ -297,7 +297,7 def _iteritems(data):
297 return data
297 return data
298
298
299
299
300 class _plainconverter(object):
300 class _plainconverter:
301 '''convert non-primitive data types to text'''
301 '''convert non-primitive data types to text'''
302
302
303 storecontext = False
303 storecontext = False
@@ -452,7 +452,7 class jsonformatter(baseformatter):
452 self._out.write(b"\n]\n")
452 self._out.write(b"\n]\n")
453
453
454
454
455 class _templateconverter(object):
455 class _templateconverter:
456 '''convert non-primitive data types to be processed by templater'''
456 '''convert non-primitive data types to be processed by templater'''
457
457
458 storecontext = True
458 storecontext = True
@@ -541,7 +541,7 class templateformatter(baseformatter):
541
541
542
542
543 @attr.s(frozen=True)
543 @attr.s(frozen=True)
544 class templatespec(object):
544 class templatespec:
545 ref = attr.ib()
545 ref = attr.ib()
546 tmpl = attr.ib()
546 tmpl = attr.ib()
547 mapfile = attr.ib()
547 mapfile = attr.ib()
@@ -358,7 +358,7 def _drawendinglines(lines, extra, edgem
358
358
359
359
360 @attr.s
360 @attr.s
361 class asciistate(object):
361 class asciistate:
362 """State of ascii() graph rendering"""
362 """State of ascii() graph rendering"""
363
363
364 seen = attr.ib(init=False, default=attr.Factory(list))
364 seen = attr.ib(init=False, default=attr.Factory(list))
@@ -35,7 +35,7 def matchlines(body, regexp):
35 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
35 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
36
36
37
37
38 class linestate(object):
38 class linestate:
39 def __init__(self, line, linenum, colstart, colend):
39 def __init__(self, line, linenum, colstart, colend):
40 self.line = line
40 self.line = line
41 self.linenum = linenum
41 self.linenum = linenum
@@ -79,7 +79,7 def difflinestates(a, b):
79 yield (b'+', b[i])
79 yield (b'+', b[i])
80
80
81
81
82 class grepsearcher(object):
82 class grepsearcher:
83 """Search files and revisions for lines matching the given pattern
83 """Search files and revisions for lines matching the given pattern
84
84
85 Options:
85 Options:
@@ -1534,7 +1534,7 foi = [
1534 ]
1534 ]
1535
1535
1536
1536
1537 class cachedlocalrepo(object):
1537 class cachedlocalrepo:
1538 """Holds a localrepository that can be cached and reused."""
1538 """Holds a localrepository that can be cached and reused."""
1539
1539
1540 def __init__(self, repo):
1540 def __init__(self, repo):
@@ -54,7 +54,7 def hgwebdir(config, baseui=None):
54 return hgwebdir_mod.hgwebdir(config, baseui=baseui)
54 return hgwebdir_mod.hgwebdir(config, baseui=baseui)
55
55
56
56
57 class httpservice(object):
57 class httpservice:
58 def __init__(self, ui, app, opts):
58 def __init__(self, ui, app, opts):
59 self.ui = ui
59 self.ui = ui
60 self.app = app
60 self.app = app
@@ -115,7 +115,7 class ErrorResponse(Exception):
115 self.message = message
115 self.message = message
116
116
117
117
118 class continuereader(object):
118 class continuereader:
119 """File object wrapper to handle HTTP 100-continue.
119 """File object wrapper to handle HTTP 100-continue.
120
120
121 This is used by servers so they automatically handle Expect: 100-continue
121 This is used by servers so they automatically handle Expect: 100-continue
@@ -110,7 +110,7 def makebreadcrumb(url, prefix=b''):
110 return templateutil.mappinglist(reversed(breadcrumb))
110 return templateutil.mappinglist(reversed(breadcrumb))
111
111
112
112
113 class requestcontext(object):
113 class requestcontext:
114 """Holds state/context for an individual request.
114 """Holds state/context for an individual request.
115
115
116 Servers can be multi-threaded. Holding state on the WSGI application
116 Servers can be multi-threaded. Holding state on the WSGI application
@@ -235,7 +235,7 class requestcontext(object):
235 return self.res.sendresponse()
235 return self.res.sendresponse()
236
236
237
237
238 class hgweb(object):
238 class hgweb:
239 """HTTP server for individual repositories.
239 """HTTP server for individual repositories.
240
240
241 Instances of this class serve HTTP responses for a particular
241 Instances of this class serve HTTP responses for a particular
@@ -268,7 +268,7 def indexentries(
268 return templateutil.mappinggenerator(_indexentriesgen, args=args)
268 return templateutil.mappinggenerator(_indexentriesgen, args=args)
269
269
270
270
271 class hgwebdir(object):
271 class hgwebdir:
272 """HTTP server for multiple repositories.
272 """HTTP server for multiple repositories.
273
273
274 Given a configuration, different repositories will be served depending
274 Given a configuration, different repositories will be served depending
@@ -21,7 +21,7 from ..utils import (
21 )
21 )
22
22
23
23
24 class multidict(object):
24 class multidict:
25 """A dict like object that can store multiple values for a key.
25 """A dict like object that can store multiple values for a key.
26
26
27 Used to store parsed request parameters.
27 Used to store parsed request parameters.
@@ -81,7 +81,7 class multidict(object):
81
81
82
82
83 @attr.s(frozen=True)
83 @attr.s(frozen=True)
84 class parsedrequest(object):
84 class parsedrequest:
85 """Represents a parsed WSGI request.
85 """Represents a parsed WSGI request.
86
86
87 Contains both parsed parameters as well as a handle on the input stream.
87 Contains both parsed parameters as well as a handle on the input stream.
@@ -355,7 +355,7 def parserequestfromenv(env, reponame=No
355 )
355 )
356
356
357
357
358 class offsettrackingwriter(object):
358 class offsettrackingwriter:
359 """A file object like object that is append only and tracks write count.
359 """A file object like object that is append only and tracks write count.
360
360
361 Instances are bound to a callable. This callable is called with data
361 Instances are bound to a callable. This callable is called with data
@@ -388,7 +388,7 class offsettrackingwriter(object):
388 return self._offset
388 return self._offset
389
389
390
390
391 class wsgiresponse(object):
391 class wsgiresponse:
392 """Represents a response to a WSGI request.
392 """Represents a response to a WSGI request.
393
393
394 A response consists of a status line, headers, and a body.
394 A response consists of a status line, headers, and a body.
@@ -52,7 +52,7 def _splitURI(uri):
52 return urlreq.unquote(path), query
52 return urlreq.unquote(path), query
53
53
54
54
55 class _error_logger(object):
55 class _error_logger:
56 def __init__(self, handler):
56 def __init__(self, handler):
57 self.handler = handler
57 self.handler = handler
58
58
@@ -343,7 +343,7 except ImportError:
343 _mixin = socketserver.ForkingMixIn
343 _mixin = socketserver.ForkingMixIn
344 else:
344 else:
345
345
346 class _mixin(object):
346 class _mixin:
347 pass
347 pass
348
348
349
349
@@ -46,7 +46,7 from . import webutil
46 commands = {}
46 commands = {}
47
47
48
48
49 class webcommand(object):
49 class webcommand:
50 """Decorator used to register a web command handler.
50 """Decorator used to register a web command handler.
51
51
52 The decorator takes as its positional arguments the name/path the
52 The decorator takes as its positional arguments the name/path the
@@ -99,7 +99,7 def _navseq(step, firststep=None):
99 step *= 10
99 step *= 10
100
100
101
101
102 class revnav(object):
102 class revnav:
103 def __init__(self, repo):
103 def __init__(self, repo):
104 """Navigation generation object
104 """Navigation generation object
105
105
@@ -29,7 +29,7 def _formatparam(param, value=None, quot
29 return param
29 return param
30
30
31
31
32 class Headers(object):
32 class Headers:
33 """Manage a collection of HTTP response headers"""
33 """Manage a collection of HTTP response headers"""
34
34
35 def __init__(self, headers=None):
35 def __init__(self, headers=None):
@@ -26,7 +26,7 urlerr = util.urlerr
26 urlreq = util.urlreq
26 urlreq = util.urlreq
27
27
28 # moved here from url.py to avoid a cycle
28 # moved here from url.py to avoid a cycle
29 class httpsendfile(object):
29 class httpsendfile:
30 """This is a wrapper around the objects returned by python's "open".
30 """This is a wrapper around the objects returned by python's "open".
31
31
32 Its purpose is to send file-like objects via HTTP.
32 Its purpose is to send file-like objects via HTTP.
@@ -62,7 +62,7 def encodevalueinheaders(value, header,
62 return result
62 return result
63
63
64
64
65 class _multifile(object):
65 class _multifile:
66 def __init__(self, *fileobjs):
66 def __init__(self, *fileobjs):
67 for f in fileobjs:
67 for f in fileobjs:
68 if not util.safehasattr(f, b'length'):
68 if not util.safehasattr(f, b'length'):
@@ -388,7 +388,7 class ipeerv2(ipeerconnection, ipeercapa
388
388
389
389
390 @interfaceutil.implementer(ipeerbase)
390 @interfaceutil.implementer(ipeerbase)
391 class peer(object):
391 class peer:
392 """Base class for peer repositories."""
392 """Base class for peer repositories."""
393
393
394 limitedarguments = False
394 limitedarguments = False
@@ -20,11 +20,11 if encoding.environ.get(b'HGREALINTERFAC
20 implementer = zi.implementer
20 implementer = zi.implementer
21 else:
21 else:
22
22
23 class Attribute(object):
23 class Attribute:
24 def __init__(self, __name__, __doc__=b''):
24 def __init__(self, __name__, __doc__=b''):
25 pass
25 pass
26
26
27 class Interface(object):
27 class Interface:
28 def __init__(
28 def __init__(
29 self, name, bases=(), attrs=None, __doc__=None, __module__=None
29 self, name, bases=(), attrs=None, __doc__=None, __module__=None
30 ):
30 ):
@@ -107,7 +107,7 urlreq = util.urlreq
107 DEBUG = None
107 DEBUG = None
108
108
109
109
110 class ConnectionManager(object):
110 class ConnectionManager:
111 """
111 """
112 The connection manager must be able to:
112 The connection manager must be able to:
113 * keep track of all existing
113 * keep track of all existing
@@ -170,7 +170,7 class ConnectionManager(object):
170 return dict(self._hostmap)
170 return dict(self._hostmap)
171
171
172
172
173 class KeepAliveHandler(object):
173 class KeepAliveHandler:
174 def __init__(self, timeout=None):
174 def __init__(self, timeout=None):
175 self._cm = ConnectionManager()
175 self._cm = ConnectionManager()
176 self._timeout = timeout
176 self._timeout = timeout
@@ -789,7 +789,7 def test_timeout(url):
789 global DEBUG
789 global DEBUG
790 dbbackup = DEBUG
790 dbbackup = DEBUG
791
791
792 class FakeLogger(object):
792 class FakeLogger:
793 def debug(self, msg, *args):
793 def debug(self, msg, *args):
794 print(msg % args)
794 print(msg % args)
795
795
@@ -33,7 +33,7 class LineLogError(Exception):
33
33
34
34
35 @attr.s
35 @attr.s
36 class lineinfo(object):
36 class lineinfo:
37 # Introducing revision of this line.
37 # Introducing revision of this line.
38 rev = attr.ib()
38 rev = attr.ib()
39 # Line number for this line in its introducing revision.
39 # Line number for this line in its introducing revision.
@@ -43,7 +43,7 class lineinfo(object):
43
43
44
44
45 @attr.s
45 @attr.s
46 class annotateresult(object):
46 class annotateresult:
47 rev = attr.ib()
47 rev = attr.ib()
48 lines = attr.ib()
48 lines = attr.ib()
49 _eof = attr.ib()
49 _eof = attr.ib()
@@ -52,7 +52,7 class annotateresult(object):
52 return iter(self.lines)
52 return iter(self.lines)
53
53
54
54
55 class _llinstruction(object): # pytype: disable=ignored-metaclass
55 class _llinstruction: # pytype: disable=ignored-metaclass
56
56
57 __metaclass__ = abc.ABCMeta
57 __metaclass__ = abc.ABCMeta
58
58
@@ -233,7 +233,7 def _decodeone(data, offset):
233 raise NotImplementedError(b'Unimplemented opcode %r' % opcode)
233 raise NotImplementedError(b'Unimplemented opcode %r' % opcode)
234
234
235
235
236 class linelog(object):
236 class linelog:
237 """Efficient cache for per-line history information."""
237 """Efficient cache for per-line history information."""
238
238
239 def __init__(self, program=None, maxrev=0):
239 def __init__(self, program=None, maxrev=0):
@@ -251,7 +251,7 legacycaps = moderncaps.union({b'changeg
251
251
252
252
253 @interfaceutil.implementer(repository.ipeercommandexecutor)
253 @interfaceutil.implementer(repository.ipeercommandexecutor)
254 class localcommandexecutor(object):
254 class localcommandexecutor:
255 def __init__(self, peer):
255 def __init__(self, peer):
256 self._peer = peer
256 self._peer = peer
257 self._sent = False
257 self._sent = False
@@ -1215,7 +1215,7 def makemain(**kwargs):
1215
1215
1216
1216
1217 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
1217 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
1218 class revlogfilestorage(object):
1218 class revlogfilestorage:
1219 """File storage when using revlogs."""
1219 """File storage when using revlogs."""
1220
1220
1221 def file(self, path):
1221 def file(self, path):
@@ -1226,7 +1226,7 class revlogfilestorage(object):
1226
1226
1227
1227
1228 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
1228 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
1229 class revlognarrowfilestorage(object):
1229 class revlognarrowfilestorage:
1230 """File storage when using revlogs and narrow files."""
1230 """File storage when using revlogs and narrow files."""
1231
1231
1232 def file(self, path):
1232 def file(self, path):
@@ -1259,7 +1259,7 REPO_INTERFACES = [
1259
1259
1260
1260
1261 @interfaceutil.implementer(repository.ilocalrepositorymain)
1261 @interfaceutil.implementer(repository.ilocalrepositorymain)
1262 class localrepository(object):
1262 class localrepository:
1263 """Main class for representing local repositories.
1263 """Main class for representing local repositories.
1264
1264
1265 All local repositories are instances of this class.
1265 All local repositories are instances of this class.
@@ -2044,7 +2044,7 class localrepository(object):
2044
2044
2045 # This simplifies its cache management by having one decorated
2045 # This simplifies its cache management by having one decorated
2046 # function (this one) and the rest simply fetch things from it.
2046 # function (this one) and the rest simply fetch things from it.
2047 class tagscache(object):
2047 class tagscache:
2048 def __init__(self):
2048 def __init__(self):
2049 # These two define the set of tags for this repository. tags
2049 # These two define the set of tags for this repository. tags
2050 # maps tag name to node; tagtypes maps tag name to 'global' or
2050 # maps tag name to node; tagtypes maps tag name to 'global' or
@@ -3912,7 +3912,7 def poisonrepository(repo):
3912 #
3912 #
3913 # But we have to allow the close() method because some constructors
3913 # But we have to allow the close() method because some constructors
3914 # of repos call close() on repo references.
3914 # of repos call close() on repo references.
3915 class poisonedrepository(object):
3915 class poisonedrepository:
3916 def __getattribute__(self, item):
3916 def __getattribute__(self, item):
3917 if item == 'close':
3917 if item == 'close':
3918 return object.__getattribute__(self, item)
3918 return object.__getattribute__(self, item)
@@ -173,7 +173,7 def trylock(ui, vfs, lockname, timeout,
173 return l
173 return l
174
174
175
175
176 class lock(object):
176 class lock:
177 """An advisory lock held by one process to control access to a set
177 """An advisory lock held by one process to control access to a set
178 of files. Non-cooperating processes or incorrectly written scripts
178 of files. Non-cooperating processes or incorrectly written scripts
179 can ignore Mercurial's locking scheme and stomp all over the
179 can ignore Mercurial's locking scheme and stomp all over the
@@ -227,7 +227,7 def diffordiffstat(
227 )
227 )
228
228
229
229
230 class changesetdiffer(object):
230 class changesetdiffer:
231 """Generate diff of changeset with pre-configured filtering functions"""
231 """Generate diff of changeset with pre-configured filtering functions"""
232
232
233 def _makefilematcher(self, ctx):
233 def _makefilematcher(self, ctx):
@@ -261,7 +261,7 def changesetlabels(ctx):
261 return b' '.join(labels)
261 return b' '.join(labels)
262
262
263
263
264 class changesetprinter(object):
264 class changesetprinter:
265 '''show changeset information when templating not requested.'''
265 '''show changeset information when templating not requested.'''
266
266
267 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
267 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
@@ -706,7 +706,7 def changesetdisplayer(ui, repo, opts, d
706
706
707
707
708 @attr.s
708 @attr.s
709 class walkopts(object):
709 class walkopts:
710 """Options to configure a set of revisions and file matcher factory
710 """Options to configure a set of revisions and file matcher factory
711 to scan revision/file history
711 to scan revision/file history
712 """
712 """
@@ -73,7 +73,7 def _matchevent(event, tracked):
73 return b'*' in tracked or event in tracked
73 return b'*' in tracked or event in tracked
74
74
75
75
76 class filelogger(object):
76 class filelogger:
77 """Basic logger backed by physical file with optional rotation"""
77 """Basic logger backed by physical file with optional rotation"""
78
78
79 def __init__(self, vfs, name, tracked, maxfiles=0, maxsize=0):
79 def __init__(self, vfs, name, tracked, maxfiles=0, maxsize=0):
@@ -104,7 +104,7 class filelogger(object):
104 )
104 )
105
105
106
106
107 class fileobjectlogger(object):
107 class fileobjectlogger:
108 """Basic logger backed by file-like object"""
108 """Basic logger backed by file-like object"""
109
109
110 def __init__(self, fp, tracked):
110 def __init__(self, fp, tracked):
@@ -129,7 +129,7 class fileobjectlogger(object):
129 )
129 )
130
130
131
131
132 class proxylogger(object):
132 class proxylogger:
133 """Forward log events to another logger to be set later"""
133 """Forward log events to another logger to be set later"""
134
134
135 def __init__(self):
135 def __init__(self):
@@ -22,7 +22,7 def profile(f, *args, **kwds):
22 return Stats(p.getstats())
22 return Stats(p.getstats())
23
23
24
24
25 class Stats(object):
25 class Stats:
26 """XXX docstring"""
26 """XXX docstring"""
27
27
28 def __init__(self, data):
28 def __init__(self, data):
@@ -26,7 +26,7 def label(code):
26 )
26 )
27
27
28
28
29 class KCacheGrind(object):
29 class KCacheGrind:
30 def __init__(self, profiler):
30 def __init__(self, profiler):
31 self.data = profiler.getstats()
31 self.data = profiler.getstats()
32 self.out_file = None
32 self.out_file = None
@@ -84,7 +84,7 def _text(it):
84 return b''.join(lines)
84 return b''.join(lines)
85
85
86
86
87 class lazymanifestiter(object):
87 class lazymanifestiter:
88 def __init__(self, lm):
88 def __init__(self, lm):
89 self.pos = 0
89 self.pos = 0
90 self.lm = lm
90 self.lm = lm
@@ -107,7 +107,7 class lazymanifestiter(object):
107 __next__ = next
107 __next__ = next
108
108
109
109
110 class lazymanifestiterentries(object):
110 class lazymanifestiterentries:
111 def __init__(self, lm):
111 def __init__(self, lm):
112 self.lm = lm
112 self.lm = lm
113 self.pos = 0
113 self.pos = 0
@@ -158,7 +158,7 def _cmp(a, b):
158 _manifestflags = {b'', b'l', b't', b'x'}
158 _manifestflags = {b'', b'l', b't', b'x'}
159
159
160
160
161 class _lazymanifest(object):
161 class _lazymanifest:
162 """A pure python manifest backed by a byte string. It is supplimented with
162 """A pure python manifest backed by a byte string. It is supplimented with
163 internal lists as it is modified, until it is compacted back to a pure byte
163 internal lists as it is modified, until it is compacted back to a pure byte
164 string.
164 string.
@@ -473,7 +473,7 except AttributeError:
473
473
474
474
475 @interfaceutil.implementer(repository.imanifestdict)
475 @interfaceutil.implementer(repository.imanifestdict)
476 class manifestdict(object):
476 class manifestdict:
477 def __init__(self, nodelen, data=b''):
477 def __init__(self, nodelen, data=b''):
478 self._nodelen = nodelen
478 self._nodelen = nodelen
479 self._lm = _lazymanifest(nodelen, data)
479 self._lm = _lazymanifest(nodelen, data)
@@ -796,7 +796,7 def _splittopdir(f):
796
796
797
797
798 @interfaceutil.implementer(repository.imanifestdict)
798 @interfaceutil.implementer(repository.imanifestdict)
799 class treemanifest(object):
799 class treemanifest:
800 def __init__(self, nodeconstants, dir=b'', text=b''):
800 def __init__(self, nodeconstants, dir=b'', text=b''):
801 self._dir = dir
801 self._dir = dir
802 self.nodeconstants = nodeconstants
802 self.nodeconstants = nodeconstants
@@ -1550,7 +1550,7 class FastdeltaUnavailable(Exception):
1550
1550
1551
1551
1552 @interfaceutil.implementer(repository.imanifeststorage)
1552 @interfaceutil.implementer(repository.imanifeststorage)
1553 class manifestrevlog(object):
1553 class manifestrevlog:
1554 """A revlog that stores manifest texts. This is responsible for caching the
1554 """A revlog that stores manifest texts. This is responsible for caching the
1555 full-text manifest contents.
1555 full-text manifest contents.
1556 """
1556 """
@@ -1908,7 +1908,7 class manifestrevlog(object):
1908
1908
1909
1909
1910 @interfaceutil.implementer(repository.imanifestlog)
1910 @interfaceutil.implementer(repository.imanifestlog)
1911 class manifestlog(object):
1911 class manifestlog:
1912 """A collection class representing the collection of manifest snapshots
1912 """A collection class representing the collection of manifest snapshots
1913 referenced by commits in the repository.
1913 referenced by commits in the repository.
1914
1914
@@ -2007,7 +2007,7 class manifestlog(object):
2007
2007
2008
2008
2009 @interfaceutil.implementer(repository.imanifestrevisionwritable)
2009 @interfaceutil.implementer(repository.imanifestrevisionwritable)
2010 class memmanifestctx(object):
2010 class memmanifestctx:
2011 def __init__(self, manifestlog):
2011 def __init__(self, manifestlog):
2012 self._manifestlog = manifestlog
2012 self._manifestlog = manifestlog
2013 self._manifestdict = manifestdict(manifestlog.nodeconstants.nodelen)
2013 self._manifestdict = manifestdict(manifestlog.nodeconstants.nodelen)
@@ -2037,7 +2037,7 class memmanifestctx(object):
2037
2037
2038
2038
2039 @interfaceutil.implementer(repository.imanifestrevisionstored)
2039 @interfaceutil.implementer(repository.imanifestrevisionstored)
2040 class manifestctx(object):
2040 class manifestctx:
2041 """A class representing a single revision of a manifest, including its
2041 """A class representing a single revision of a manifest, including its
2042 contents, its parent revs, and its linkrev.
2042 contents, its parent revs, and its linkrev.
2043 """
2043 """
@@ -2117,7 +2117,7 class manifestctx(object):
2117
2117
2118
2118
2119 @interfaceutil.implementer(repository.imanifestrevisionwritable)
2119 @interfaceutil.implementer(repository.imanifestrevisionwritable)
2120 class memtreemanifestctx(object):
2120 class memtreemanifestctx:
2121 def __init__(self, manifestlog, dir=b''):
2121 def __init__(self, manifestlog, dir=b''):
2122 self._manifestlog = manifestlog
2122 self._manifestlog = manifestlog
2123 self._dir = dir
2123 self._dir = dir
@@ -2152,7 +2152,7 class memtreemanifestctx(object):
2152
2152
2153
2153
2154 @interfaceutil.implementer(repository.imanifestrevisionstored)
2154 @interfaceutil.implementer(repository.imanifestrevisionstored)
2155 class treemanifestctx(object):
2155 class treemanifestctx:
2156 def __init__(self, manifestlog, dir, node):
2156 def __init__(self, manifestlog, dir, node):
2157 self._manifestlog = manifestlog
2157 self._manifestlog = manifestlog
2158 self._dir = dir
2158 self._dir = dir
@@ -382,7 +382,7 def _donormalize(patterns, default, root
382 return kindpats
382 return kindpats
383
383
384
384
385 class basematcher(object):
385 class basematcher:
386 def __init__(self, badfn=None):
386 def __init__(self, badfn=None):
387 if badfn is not None:
387 if badfn is not None:
388 self.bad = badfn
388 self.bad = badfn
@@ -659,7 +659,7 class patternmatcher(basematcher):
659 # This is basically a reimplementation of pathutil.dirs that stores the
659 # This is basically a reimplementation of pathutil.dirs that stores the
660 # children instead of just a count of them, plus a small optional optimization
660 # children instead of just a count of them, plus a small optional optimization
661 # to avoid some directories we don't need.
661 # to avoid some directories we don't need.
662 class _dirchildren(object):
662 class _dirchildren:
663 def __init__(self, paths, onlyinclude=None):
663 def __init__(self, paths, onlyinclude=None):
664 self._dirs = {}
664 self._dirs = {}
665 self._onlyinclude = onlyinclude or []
665 self._onlyinclude = onlyinclude or []
@@ -37,7 +37,7 splitnewlines = bdiff.splitnewlines
37
37
38
38
39 # TODO: this looks like it could be an attrs, which might help pytype
39 # TODO: this looks like it could be an attrs, which might help pytype
40 class diffopts(object):
40 class diffopts:
41 """context is the number of context lines
41 """context is the number of context lines
42 text treats all files as text
42 text treats all files as text
43 showfunc enables diff -p output
43 showfunc enables diff -p output
@@ -66,7 +66,7 def _checkunknownfile(repo, wctx, mctx,
66 )
66 )
67
67
68
68
69 class _unknowndirschecker(object):
69 class _unknowndirschecker:
70 """
70 """
71 Look for any unknown files or directories that may have a path conflict
71 Look for any unknown files or directories that may have a path conflict
72 with a file. If any path prefix of the file exists as a file or link,
72 with a file. If any path prefix of the file exists as a file or link,
@@ -537,7 +537,7 def _filternarrowactions(narrowmatch, br
537 raise error.StateError(msg % f)
537 raise error.StateError(msg % f)
538
538
539
539
540 class mergeresult(object):
540 class mergeresult:
541 """An object representing result of merging manifests.
541 """An object representing result of merging manifests.
542
542
543 It has information about what actions need to be performed on dirstate
543 It has information about what actions need to be performed on dirstate
@@ -1467,7 +1467,7 def _prefetchfiles(repo, ctx, mresult):
1467
1467
1468
1468
1469 @attr.s(frozen=True)
1469 @attr.s(frozen=True)
1470 class updateresult(object):
1470 class updateresult:
1471 updatedcount = attr.ib()
1471 updatedcount = attr.ib()
1472 mergedcount = attr.ib()
1472 mergedcount = attr.ib()
1473 removedcount = attr.ib()
1473 removedcount = attr.ib()
@@ -100,7 +100,7 CHANGE_REMOVED = b'removed'
100 CHANGE_MODIFIED = b'modified'
100 CHANGE_MODIFIED = b'modified'
101
101
102
102
103 class MergeAction(object):
103 class MergeAction:
104 """represent an "action" merge need to take for a given file
104 """represent an "action" merge need to take for a given file
105
105
106 Attributes:
106 Attributes:
@@ -194,7 +194,7 CONVERT_MERGE_ACTIONS = (
194 )
194 )
195
195
196
196
197 class _mergestate_base(object):
197 class _mergestate_base:
198 """track 3-way merge state of individual files
198 """track 3-way merge state of individual files
199
199
200 The merge state is stored on disk when needed. Two files are used: one with
200 The merge state is stored on disk when needed. Two files are used: one with
@@ -22,7 +22,7 from .revlogutils import (
22 )
22 )
23
23
24
24
25 class ChangingFiles(object):
25 class ChangingFiles:
26 """A class recording the changes made to files by a changeset
26 """A class recording the changes made to files by a changeset
27
27
28 Actions performed on files are gathered into 3 sets:
28 Actions performed on files are gathered into 3 sets:
@@ -16,7 +16,7 def tolist(val):
16 return [val]
16 return [val]
17
17
18
18
19 class namespaces(object):
19 class namespaces:
20 """provides an interface to register and operate on multiple namespaces. See
20 """provides an interface to register and operate on multiple namespaces. See
21 the namespace class below for details on the namespace object.
21 the namespace class below for details on the namespace object.
22
22
@@ -124,7 +124,7 class namespaces(object):
124 raise KeyError(_(b'no such name: %s') % name)
124 raise KeyError(_(b'no such name: %s') % name)
125
125
126
126
127 class namespace(object):
127 class namespace:
128 """provides an interface to a namespace
128 """provides an interface to a namespace
129
129
130 Namespaces are basically generic many-to-many mapping between some
130 Namespaces are basically generic many-to-many mapping between some
@@ -31,7 +31,7 nullrev = -1
31 wdirrev = 0x7FFFFFFF
31 wdirrev = 0x7FFFFFFF
32
32
33
33
34 class sha1nodeconstants(object):
34 class sha1nodeconstants:
35 nodelen = 20
35 nodelen = 20
36
36
37 # In hex, this is '0000000000000000000000000000000000000000'
37 # In hex, this is '0000000000000000000000000000000000000000'
@@ -541,7 +541,7 def _checkinvalidmarkers(repo, markers):
541 )
541 )
542
542
543
543
544 class obsstore(object):
544 class obsstore:
545 """Store obsolete markers
545 """Store obsolete markers
546
546
547 Markers can be accessed with two mappings:
547 Markers can be accessed with two mappings:
@@ -56,7 +56,7 bumpedfix = 1
56 usingsha256 = 2
56 usingsha256 = 2
57
57
58
58
59 class marker(object):
59 class marker:
60 """Wrap obsolete marker raw data"""
60 """Wrap obsolete marker raw data"""
61
61
62 def __init__(self, repo, data):
62 def __init__(self, repo, data):
@@ -25,7 +25,7 from . import (
25 from .utils import stringutil
25 from .utils import stringutil
26
26
27
27
28 class parser(object):
28 class parser:
29 def __init__(self, elements, methods=None):
29 def __init__(self, elements, methods=None):
30 self._elements = elements
30 self._elements = elements
31 self._methods = methods
31 self._methods = methods
@@ -415,7 +415,7 def parseerrordetail(inst):
415 return inst.message
415 return inst.message
416
416
417
417
418 class alias(object):
418 class alias:
419 """Parsed result of alias"""
419 """Parsed result of alias"""
420
420
421 def __init__(self, name, args, err, replacement):
421 def __init__(self, name, args, err, replacement):
@@ -429,7 +429,7 class alias(object):
429 self.warned = False
429 self.warned = False
430
430
431
431
432 class basealiasrules(object):
432 class basealiasrules:
433 """Parsing and expansion rule set of aliases
433 """Parsing and expansion rule set of aliases
434
434
435 This is a helper for fileset/revset/template aliases. A concrete rule set
435 This is a helper for fileset/revset/template aliases. A concrete rule set
@@ -149,7 +149,7 def split(stream):
149 def remainder(cur):
149 def remainder(cur):
150 yield chunk(cur)
150 yield chunk(cur)
151
151
152 class fiter(object):
152 class fiter:
153 def __init__(self, fp):
153 def __init__(self, fp):
154 self.fp = fp
154 self.fp = fp
155
155
@@ -342,7 +342,7 def _extract(ui, fileobj, tmpname, tmpfp
342 return data
342 return data
343
343
344
344
345 class patchmeta(object):
345 class patchmeta:
346 """Patched file metadata
346 """Patched file metadata
347
347
348 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
348 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
@@ -435,7 +435,7 def readgitpatch(lr):
435 return gitpatches
435 return gitpatches
436
436
437
437
438 class linereader(object):
438 class linereader:
439 # simple class to allow pushing lines back into the input stream
439 # simple class to allow pushing lines back into the input stream
440 def __init__(self, fp):
440 def __init__(self, fp):
441 self.fp = fp
441 self.fp = fp
@@ -456,7 +456,7 class linereader(object):
456 return iter(self.readline, b'')
456 return iter(self.readline, b'')
457
457
458
458
459 class abstractbackend(object):
459 class abstractbackend:
460 def __init__(self, ui):
460 def __init__(self, ui):
461 self.ui = ui
461 self.ui = ui
462
462
@@ -592,7 +592,7 class workingbackend(fsbackend):
592 return sorted(self.changed)
592 return sorted(self.changed)
593
593
594
594
595 class filestore(object):
595 class filestore:
596 def __init__(self, maxsize=None):
596 def __init__(self, maxsize=None):
597 self.opener = None
597 self.opener = None
598 self.files = {}
598 self.files = {}
@@ -681,7 +681,7 contextdesc = re.compile(br'(?:---|\*\*\
681 eolmodes = [b'strict', b'crlf', b'lf', b'auto']
681 eolmodes = [b'strict', b'crlf', b'lf', b'auto']
682
682
683
683
684 class patchfile(object):
684 class patchfile:
685 def __init__(self, ui, gp, backend, store, eolmode=b'strict'):
685 def __init__(self, ui, gp, backend, store, eolmode=b'strict'):
686 self.fname = gp.path
686 self.fname = gp.path
687 self.eolmode = eolmode
687 self.eolmode = eolmode
@@ -914,7 +914,7 class patchfile(object):
914 return len(self.rej)
914 return len(self.rej)
915
915
916
916
917 class header(object):
917 class header:
918 """patch header"""
918 """patch header"""
919
919
920 diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
920 diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
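For reference, the `diffgit_re` pattern visible above matches the first line of a git-style diff and captures both paths; the file name below is only an example:

    import re

    diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
    m = diffgit_re.match(b'diff --git a/mercurial/util.py b/mercurial/util.py')
    assert m.group(1) == b'mercurial/util.py'
    assert m.group(2) == b'mercurial/util.py'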
@@ -994,7 +994,7 class header(object):
994 )
994 )
995
995
996
996
997 class recordhunk(object):
997 class recordhunk:
998 """patch hunk
998 """patch hunk
999
999
1000 XXX shouldn't we merge this with the other hunk class?
1000 XXX shouldn't we merge this with the other hunk class?
@@ -1349,7 +1349,7 the hunk is left unchanged.
1349 )
1349 )
1350
1350
1351
1351
1352 class hunk(object):
1352 class hunk:
1353 def __init__(self, desc, num, lr, context):
1353 def __init__(self, desc, num, lr, context):
1354 self.number = num
1354 self.number = num
1355 self.desc = desc
1355 self.desc = desc
@@ -1577,7 +1577,7 class hunk(object):
1577 return old, oldstart, new, newstart
1577 return old, oldstart, new, newstart
1578
1578
1579
1579
1580 class binhunk(object):
1580 class binhunk:
1581 """A binary patch file."""
1581 """A binary patch file."""
1582
1582
1583 def __init__(self, lr, fname):
1583 def __init__(self, lr, fname):
@@ -1758,7 +1758,7 def parsepatch(originalchunks, maxcontex
1758 +9
1758 +9
1759 """
1759 """
1760
1760
1761 class parser(object):
1761 class parser:
1762 """patch parsing state machine"""
1762 """patch parsing state machine"""
1763
1763
1764 def __init__(self):
1764 def __init__(self):
@@ -31,7 +31,7 def _lowerclean(s):
31 return encoding.hfsignoreclean(s.lower())
31 return encoding.hfsignoreclean(s.lower())
32
32
33
33
34 class pathauditor(object):
34 class pathauditor:
35 """ensure that a filesystem path contains no banned components.
35 """ensure that a filesystem path contains no banned components.
36 the following properties of a path are checked:
36 the following properties of a path are checked:
37
37
@@ -314,7 +314,7 def finddirs(path):
314 yield b''
314 yield b''
315
315
316
316
317 class dirs(object):
317 class dirs:
318 '''a multiset of directory names from a set of file paths'''
318 '''a multiset of directory names from a set of file paths'''
319
319
320 def __init__(self, map, only_tracked=False):
320 def __init__(self, map, only_tracked=False):
@@ -343,7 +343,7 def _trackphasechange(data, rev, old, ne
343 data.insert(low + 1, (pycompat.xrange(rev, rev + 1), t))
343 data.insert(low + 1, (pycompat.xrange(rev, rev + 1), t))
344
344
345
345
346 class phasecache(object):
346 class phasecache:
347 def __init__(self, repo, phasedefaults, _load=True):
347 def __init__(self, repo, phasedefaults, _load=True):
348 # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None
348 # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None
349 if _load:
349 if _load:
@@ -879,7 +879,7 def analyzeremotephases(repo, subset, ro
879 return publicheads, draftroots
879 return publicheads, draftroots
880
880
881
881
882 class remotephasessummary(object):
882 class remotephasessummary:
883 """summarize phase information on the remote side
883 """summarize phase information on the remote side
884
884
885 :publishing: True if the remote is publishing
885 :publishing: True if the remote is publishing
@@ -664,7 +664,7 def hidewindow():
664 pass
664 pass
665
665
666
666
667 class cachestat(object):
667 class cachestat:
668 def __init__(self, path):
668 def __init__(self, path):
669 self.stat = os.stat(path)
669 self.stat = os.stat(path)
670
670
@@ -173,7 +173,7 def statprofile(ui, fp):
173 statprof.display(fp, data=data, format=displayformat, **kwargs)
173 statprof.display(fp, data=data, format=displayformat, **kwargs)
174
174
175
175
176 class profile(object):
176 class profile:
177 """Start profiling.
177 """Start profiling.
178
178
179 Profiling is active when the context manager is active. When the context
179 Profiling is active when the context manager is active. When the context
@@ -231,7 +231,7 class profile(object):
231 self._fp = open(path, b'wb')
231 self._fp = open(path, b'wb')
232 elif pycompat.iswindows:
232 elif pycompat.iswindows:
233 # parse escape sequence by win32print()
233 # parse escape sequence by win32print()
234 class uifp(object):
234 class uifp:
235 def __init__(self, ui):
235 def __init__(self, ui):
236 self._ui = ui
236 self._ui = ui
237
237
@@ -84,7 +84,7 def _eintrretry(func, *args):
84 raise
84 raise
85
85
86
86
87 class progbar(object):
87 class progbar:
88 def __init__(self, ui):
88 def __init__(self, ui):
89 self.ui = ui
89 self.ui = ui
90 self._refreshlock = threading.Lock()
90 self._refreshlock = threading.Lock()
@@ -220,7 +220,7 else:
220 err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror)
220 err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror)
221 )
221 )
222
222
223 class posixfile(object):
223 class posixfile:
224 """a file object aiming for POSIX-like semantics
224 """a file object aiming for POSIX-like semantics
225
225
226 CPython's open() returns a file that was opened *without* setting the
226 CPython's open() returns a file that was opened *without* setting the
@@ -63,7 +63,7 DIRSTATE_V2_ALL_IGNORED_RECORDED = 1 <<
63
63
64
64
65 @attr.s(slots=True, init=False)
65 @attr.s(slots=True, init=False)
66 class DirstateItem(object):
66 class DirstateItem:
67 """represent a dirstate entry
67 """represent a dirstate entry
68
68
69 It holds multiple attributes
69 It holds multiple attributes
@@ -560,7 +560,7 def gettype(q):
560 return int(q & 0xFFFF)
560 return int(q & 0xFFFF)
561
561
562
562
563 class BaseIndexObject(object):
563 class BaseIndexObject:
564 # Can I be passed to an algorithm implemented in Rust?
564 # Can I be passed to an algorithm implemented in Rust?
565 rust_ext_compat = 0
565 rust_ext_compat = 0
566 # Format of an index entry according to Python's `struct` language
566 # Format of an index entry according to Python's `struct` language
@@ -180,7 +180,7 def ctxpvec(ctx):
180 return pvec(util.b85encode(bs))
180 return pvec(util.b85encode(bs))
181
181
182
182
183 class pvec(object):
183 class pvec:
184 def __init__(self, hashorctx):
184 def __init__(self, hashorctx):
185 if isinstance(hashorctx, bytes):
185 if isinstance(hashorctx, bytes):
186 self._bs = hashorctx
186 self._bs = hashorctx
@@ -163,7 +163,7 class bytestr(bytes):
163
163
164 __bytes__() should be called if provided:
164 __bytes__() should be called if provided:
165
165
166 >>> class bytesable(object):
166 >>> class bytesable:
167 ... def __bytes__(self):
167 ... def __bytes__(self):
168 ... return b'bytes'
168 ... return b'bytes'
169 >>> bytestr(bytesable())
169 >>> bytestr(bytesable())
@@ -21,7 +21,7 from . import (
21 configitem = configitems.getitemregister
21 configitem = configitems.getitemregister
22
22
23
23
24 class _funcregistrarbase(object):
24 class _funcregistrarbase:
25 """Base of decorator to register a function for specific purpose
25 """Base of decorator to register a function for specific purpose
26
26
27 This decorator stores decorated functions into its own dict 'table'.
27 This decorator stores decorated functions into its own dict 'table'.
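A rough sketch of the registration pattern the docstring describes; the `registrar` name and call signature below are made up for illustration, not Mercurial's actual _funcregistrarbase API:

    class registrar:
        def __init__(self):
            self.table = {}

        def __call__(self, name):
            def register(func):
                # store the decorated function into the instance's own table
                self.table[name] = func
                return func
            return register

    command = registrar()

    @command(b'hello')
    def hello(ui):
        return b'hello world'

    assert command.table[b'hello'] is hello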
@@ -379,7 +379,7 def safestriproots(ui, repo, nodes):
379 return [c.node() for c in repo.set(b'roots(%ld)', tostrip)]
379 return [c.node() for c in repo.set(b'roots(%ld)', tostrip)]
380
380
381
381
382 class stripcallback(object):
382 class stripcallback:
383 """used as a transaction postclose callback"""
383 """used as a transaction postclose callback"""
384
384
385 def __init__(self, ui, repo, backup, topic):
385 def __init__(self, ui, repo, backup, topic):
@@ -19,7 +19,7 from . import (
19 )
19 )
20
20
21
21
22 class repoloader(object):
22 class repoloader:
23 """Load repositories in background thread
23 """Load repositories in background thread
24
24
25 This is designed for a forking server. A cached repo cannot be obtained
25 This is designed for a forking server. A cached repo cannot be obtained
@@ -261,7 +261,7 def wrapchangelog(unfichangelog, filtere
261 return cl
261 return cl
262
262
263
263
264 class filteredchangelogmixin(object):
264 class filteredchangelogmixin:
265 def tiprev(self):
265 def tiprev(self):
266 """filtered version of revlog.tiprev"""
266 """filtered version of revlog.tiprev"""
267 for i in pycompat.xrange(len(self) - 1, -2, -1):
267 for i in pycompat.xrange(len(self) - 1, -2, -1):
@@ -361,7 +361,7 class filteredchangelogmixin(object):
361 return super(filteredchangelogmixin, self).flags(rev)
361 return super(filteredchangelogmixin, self).flags(rev)
362
362
363
363
364 class repoview(object):
364 class repoview:
365 """Provide a read/write view of a repo through a filtered changelog
365 """Provide a read/write view of a repo through a filtered changelog
366
366
367 This object is used to access a filtered version of a repository without
367 This object is used to access a filtered version of a repository without
@@ -171,7 +171,7 HAS_FAST_PERSISTENT_NODEMAP = rustrevlog
171
171
172 @interfaceutil.implementer(repository.irevisiondelta)
172 @interfaceutil.implementer(repository.irevisiondelta)
173 @attr.s(slots=True)
173 @attr.s(slots=True)
174 class revlogrevisiondelta(object):
174 class revlogrevisiondelta:
175 node = attr.ib()
175 node = attr.ib()
176 p1node = attr.ib()
176 p1node = attr.ib()
177 p2node = attr.ib()
177 p2node = attr.ib()
@@ -187,7 +187,7 class revlogrevisiondelta(object):
187
187
188 @interfaceutil.implementer(repository.iverifyproblem)
188 @interfaceutil.implementer(repository.iverifyproblem)
189 @attr.s(frozen=True)
189 @attr.s(frozen=True)
190 class revlogproblem(object):
190 class revlogproblem:
191 warning = attr.ib(default=None)
191 warning = attr.ib(default=None)
192 error = attr.ib(default=None)
192 error = attr.ib(default=None)
193 node = attr.ib(default=None)
193 node = attr.ib(default=None)
@@ -237,7 +237,7 FILE_TOO_SHORT_MSG = _(
237 )
237 )
238
238
239
239
240 class revlog(object):
240 class revlog:
241 """
241 """
242 the underlying revision storage object
242 the underlying revision storage object
243
243
@@ -1042,7 +1042,7 class revlog(object):
1042 heads = [self.rev(n) for n in heads]
1042 heads = [self.rev(n) for n in heads]
1043
1043
1044 # we want the ancestors, but inclusive
1044 # we want the ancestors, but inclusive
1045 class lazyset(object):
1045 class lazyset:
1046 def __init__(self, lazyvalues):
1046 def __init__(self, lazyvalues):
1047 self.addedvalues = set()
1047 self.addedvalues = set()
1048 self.lazyvalues = lazyvalues
1048 self.lazyvalues = lazyvalues
@@ -62,7 +62,7 def entry(
62
62
63
63
64 @attr.s(slots=True, frozen=True)
64 @attr.s(slots=True, frozen=True)
65 class revisioninfo(object):
65 class revisioninfo:
66 """Information about a revision that allows building its fulltext
66 """Information about a revision that allows building its fulltext
67 node: expected hash of the revision
67 node: expected hash of the revision
68 p1, p2: parent revs of the revision
68 p1, p2: parent revs of the revision
@@ -38,7 +38,7 from . import flagutil
38 LIMIT_DELTA2TEXT = 2
38 LIMIT_DELTA2TEXT = 2
39
39
40
40
41 class _testrevlog(object):
41 class _testrevlog:
42 """minimalist fake revlog to use in doctests"""
42 """minimalist fake revlog to use in doctests"""
43
43
44 def __init__(self, data, density=0.5, mingap=0, snapshot=()):
44 def __init__(self, data, density=0.5, mingap=0, snapshot=()):
@@ -544,7 +544,7 def _textfromdelta(fh, revlog, baserev,
544
544
545
545
546 @attr.s(slots=True, frozen=True)
546 @attr.s(slots=True, frozen=True)
547 class _deltainfo(object):
547 class _deltainfo:
548 distance = attr.ib()
548 distance = attr.ib()
549 deltalen = attr.ib()
549 deltalen = attr.ib()
550 data = attr.ib()
550 data = attr.ib()
@@ -927,7 +927,7 def _rawgroups(revlog, p1, p2, cachedelt
927 yield (prev,)
927 yield (prev,)
928
928
929
929
930 class deltacomputer(object):
930 class deltacomputer:
931 def __init__(self, revlog):
931 def __init__(self, revlog):
932 self.revlog = revlog
932 self.revlog = revlog
933
933
@@ -99,7 +99,7 S_HEADER = struct.Struct(constants.INDEX
99 S_OLD_UID = struct.Struct('>BL')
99 S_OLD_UID = struct.Struct('>BL')
100
100
101
101
102 class RevlogDocket(object):
102 class RevlogDocket:
103 """metadata associated with revlog"""
103 """metadata associated with revlog"""
104
104
105 def __init__(
105 def __init__(
@@ -113,7 +113,7 def setup_persistent_nodemap(tr, revlog)
113 tr.addfinalize(callback_id, lambda tr: persist_nodemap(tr, revlog))
113 tr.addfinalize(callback_id, lambda tr: persist_nodemap(tr, revlog))
114
114
115
115
116 class _NoTransaction(object):
116 class _NoTransaction:
117 """transaction like object to update the nodemap outside a transaction"""
117 """transaction like object to update the nodemap outside a transaction"""
118
118
119 def __init__(self):
119 def __init__(self):
@@ -304,7 +304,7 S_VERSION = struct.Struct(">B")
304 S_HEADER = struct.Struct(">BQQQQ")
304 S_HEADER = struct.Struct(">BQQQQ")
305
305
306
306
307 class NodeMapDocket(object):
307 class NodeMapDocket:
308 """metadata associated with persistent nodemap data
308 """metadata associated with persistent nodemap data
309
309
310 The persistent data may come from disk or be on their way to disk.
310 The persistent data may come from disk or be on their way to disk.
@@ -23,7 +23,7 def _is_power_of_two(n):
23 return (n & (n - 1) == 0) and n != 0
23 return (n & (n - 1) == 0) and n != 0
24
24
25
25
26 class randomaccessfile(object):
26 class randomaccessfile:
27 """Accessing arbitrary chuncks of data within a file, with some caching"""
27 """Accessing arbitrary chuncks of data within a file, with some caching"""
28
28
29 def __init__(
29 def __init__(
@@ -62,7 +62,7 termsize = scmplatform.termsize
62
62
63
63
64 @attr.s(slots=True, repr=False)
64 @attr.s(slots=True, repr=False)
65 class status(object):
65 class status:
66 """Struct with a list of files per status.
66 """Struct with a list of files per status.
67
67
68 The 'deleted', 'unknown' and 'ignored' properties are only
68 The 'deleted', 'unknown' and 'ignored' properties are only
@@ -323,7 +323,7 def checkportabilityalert(ui):
323 return abort, warn
323 return abort, warn
324
324
325
325
326 class casecollisionauditor(object):
326 class casecollisionauditor:
327 def __init__(self, ui, abort, dirstate):
327 def __init__(self, ui, abort, dirstate):
328 self._ui = ui
328 self._ui = ui
329 self._abort = abort
329 self._abort = abort
@@ -1019,7 +1019,7 def backuppath(ui, repo, filepath):
1019 return origvfs.join(filepath)
1019 return origvfs.join(filepath)
1020
1020
1021
1021
1022 class _containsnode(object):
1022 class _containsnode:
1023 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1023 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1024
1024
1025 def __init__(self, repo, revcontainer):
1025 def __init__(self, repo, revcontainer):
@@ -1567,7 +1567,7 def writerequires(opener, requirements):
1567 fp.write(b"%s\n" % r)
1567 fp.write(b"%s\n" % r)
1568
1568
1569
1569
1570 class filecachesubentry(object):
1570 class filecachesubentry:
1571 def __init__(self, path, stat):
1571 def __init__(self, path, stat):
1572 self.path = path
1572 self.path = path
1573 self.cachestat = None
1573 self.cachestat = None
@@ -1623,7 +1623,7 class filecachesubentry(object):
1623 raise
1623 raise
1624
1624
1625
1625
1626 class filecacheentry(object):
1626 class filecacheentry:
1627 def __init__(self, paths, stat=True):
1627 def __init__(self, paths, stat=True):
1628 self._entries = []
1628 self._entries = []
1629 for path in paths:
1629 for path in paths:
@@ -1641,7 +1641,7 class filecacheentry(object):
1641 entry.refresh()
1641 entry.refresh()
1642
1642
1643
1643
1644 class filecache(object):
1644 class filecache:
1645 """A property like decorator that tracks files under .hg/ for updates.
1645 """A property like decorator that tracks files under .hg/ for updates.
1646
1646
1647 On first access, the files defined as arguments are stat()ed and the
1647 On first access, the files defined as arguments are stat()ed and the
@@ -1798,7 +1798,7 def extdatasource(repo, source):
1798 return data
1798 return data
1799
1799
1800
1800
1801 class progress(object):
1801 class progress:
1802 def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1802 def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1803 self.ui = ui
1803 self.ui = ui
1804 self.pos = 0
1804 self.pos = 0
@@ -1863,7 +1863,7 def gddeltaconfig(ui):
1863 return ui.configbool(b'format', b'generaldelta')
1863 return ui.configbool(b'format', b'generaldelta')
1864
1864
1865
1865
1866 class simplekeyvaluefile(object):
1866 class simplekeyvaluefile:
1867 """A simple file with key=value lines
1867 """A simple file with key=value lines
1868
1868
1869 Keys must be alphanumerics and start with a letter, values must not
1869 Keys must be alphanumerics and start with a letter, values must not
@@ -106,7 +106,7 def _limitsample(sample, desiredlen, ran
106 return set(sample[:desiredlen])
106 return set(sample[:desiredlen])
107
107
108
108
109 class partialdiscovery(object):
109 class partialdiscovery:
110 """an object representing ongoing discovery
110 """an object representing ongoing discovery
111
111
112 Fed with data from the remote repository, this object keeps track of the
112 Fed with data from the remote repository, this object keeps track of the
@@ -68,7 +68,7 shelvefileextensions = [b'hg', b'patch',
68 shelveuser = b'shelve@localhost'
68 shelveuser = b'shelve@localhost'
69
69
70
70
71 class ShelfDir(object):
71 class ShelfDir:
72 def __init__(self, repo, for_backups=False):
72 def __init__(self, repo, for_backups=False):
73 if for_backups:
73 if for_backups:
74 self.vfs = vfsmod.vfs(repo.vfs.join(backupdir))
74 self.vfs = vfsmod.vfs(repo.vfs.join(backupdir))
@@ -101,7 +101,7 class ShelfDir(object):
101 return sorted(info, reverse=True)
101 return sorted(info, reverse=True)
102
102
103
103
104 class Shelf(object):
104 class Shelf:
105 """Represents a shelf, including possibly multiple files storing it.
105 """Represents a shelf, including possibly multiple files storing it.
106
106
107 Old shelves will have a .patch and a .hg file. Newer shelves will
107 Old shelves will have a .patch and a .hg file. Newer shelves will
@@ -213,7 +213,7 class Shelf(object):
213 self.vfs.tryunlink(self.name + b'.' + ext)
213 self.vfs.tryunlink(self.name + b'.' + ext)
214
214
215
215
216 class shelvedstate(object):
216 class shelvedstate:
217 """Handle persistence during unshelving operations.
217 """Handle persistence during unshelving operations.
218
218
219 Handles saving and restoring a shelved state. Ensures that different
219 Handles saving and restoring a shelved state. Ensures that different
@@ -62,7 +62,7 def compare_range(a, astart, aend, b, bs
62 return True
62 return True
63
63
64
64
65 class Merge3Text(object):
65 class Merge3Text:
66 """3-way merge of texts.
66 """3-way merge of texts.
67
67
68 Given strings BASE, OTHER, THIS, tries to produce a combined text
68 Given strings BASE, OTHER, THIS, tries to produce a combined text
@@ -468,7 +468,7 def _resolve(m3, sides):
468 return lines
468 return lines
469
469
470
470
471 class MergeInput(object):
471 class MergeInput:
472 def __init__(self, fctx, label=None, label_detail=None):
472 def __init__(self, fctx, label=None, label_detail=None):
473 self.fctx = fctx
473 self.fctx = fctx
474 self.label = label
474 self.label = label
@@ -20,7 +20,7 def _typename(o):
20 return pycompat.sysbytes(type(o).__name__).lstrip(b'_')
20 return pycompat.sysbytes(type(o).__name__).lstrip(b'_')
21
21
22
22
23 class abstractsmartset(object):
23 class abstractsmartset:
24 def __nonzero__(self):
24 def __nonzero__(self):
25 """True if the smartset is not empty"""
25 """True if the smartset is not empty"""
26 raise NotImplementedError()
26 raise NotImplementedError()
@@ -47,7 +47,7 def _forwardoutput(ui, pipe, warn=False)
47 display(_(b"remote: "), l, b'\n')
47 display(_(b"remote: "), l, b'\n')
48
48
49
49
50 class doublepipe(object):
50 class doublepipe:
51 """Operate a side-channel pipe in addition of a main one
51 """Operate a side-channel pipe in addition of a main one
52
52
53 The side-channel pipe contains server output to be forwarded to the user
53 The side-channel pipe contains server output to be forwarded to the user
@@ -39,7 +39,7 if pycompat.TYPE_CHECKING:
39 assert t
39 assert t
40
40
41
41
42 class cmdstate(object):
42 class cmdstate:
43 """a wrapper class to store the state of commands like `rebase`, `graft`,
43 """a wrapper class to store the state of commands like `rebase`, `graft`,
44 `histedit`, `shelve` etc. Extensions can also use this to write state files.
44 `histedit`, `shelve` etc. Extensions can also use this to write state files.
45
45
@@ -102,7 +102,7 class cmdstate(object):
102 return self._repo.vfs.exists(self.fname)
102 return self._repo.vfs.exists(self.fname)
103
103
104
104
105 class _statecheck(object):
105 class _statecheck:
106 """a utility class that deals with multistep operations like graft,
106 """a utility class that deals with multistep operations like graft,
107 histedit, bisect, update etc. and checks whether such commands
107 histedit, bisect, update etc. and checks whether such commands
108 are in an unfinished condition or not and returns an appropriate message
108 are in an unfinished condition or not and returns an appropriate message
@@ -34,7 +34,7 urlerr = util.urlerr
34 urlreq = util.urlreq
34 urlreq = util.urlreq
35
35
36
36
37 class httprangereader(object):
37 class httprangereader:
38 def __init__(self, url, opener):
38 def __init__(self, url, opener):
39 # we assume opener has HTTPRangeHandler
39 # we assume opener has HTTPRangeHandler
40 self.url = url
40 self.url = url
@@ -154,7 +154,7 def clock():
154 ## Collection data structures
154 ## Collection data structures
155
155
156
156
157 class ProfileState(object):
157 class ProfileState:
158 def __init__(self, frequency=None):
158 def __init__(self, frequency=None):
159 self.reset(frequency)
159 self.reset(frequency)
160 self.track = b'cpu'
160 self.track = b'cpu'
@@ -202,7 +202,7 class ProfileState(object):
202 state = ProfileState()
202 state = ProfileState()
203
203
204
204
205 class CodeSite(object):
205 class CodeSite:
206 cache = {}
206 cache = {}
207
207
208 __slots__ = ('path', 'lineno', 'function', 'source')
208 __slots__ = ('path', 'lineno', 'function', 'source')
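`__slots__`, as used by CodeSite above, behaves the same on a bare `class X:` declaration because every Python 3 class is already new-style; on Python 2 an old-style class silently ignored `__slots__`, which is one reason the explicit `(object)` base used to matter. A trimmed illustration, not the real class:

    class CodeSite:
        __slots__ = ('path', 'lineno')

        def __init__(self, path, lineno):
            self.path = path
            self.lineno = lineno

    site = CodeSite('statprof.py', 1)
    assert not hasattr(site, '__dict__')   # slots suppress the per-instance dict
    try:
        site.function = 'main'             # not listed in __slots__
    except AttributeError:
        pass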
@@ -260,7 +260,7 class CodeSite(object):
260 return '%s:%s' % (self.filename(), self.function)
260 return '%s:%s' % (self.filename(), self.function)
261
261
262
262
263 class Sample(object):
263 class Sample:
264 __slots__ = ('stack', 'time')
264 __slots__ = ('stack', 'time')
265
265
266 def __init__(self, stack, time):
266 def __init__(self, stack, time):
@@ -434,7 +434,7 def profile():
434 ## Reporting API
434 ## Reporting API
435
435
436
436
437 class SiteStats(object):
437 class SiteStats:
438 def __init__(self, site):
438 def __init__(self, site):
439 self.site = site
439 self.site = site
440 self.selfcount = 0
440 self.selfcount = 0
@@ -708,7 +708,7 def display_about_method(data, fp, funct
708
708
709
709
710 def display_hotpath(data, fp, limit=0.05, **kwargs):
710 def display_hotpath(data, fp, limit=0.05, **kwargs):
711 class HotNode(object):
711 class HotNode:
712 def __init__(self, site):
712 def __init__(self, site):
713 self.site = site
713 self.site = site
714 self.count = 0
714 self.count = 0
@@ -455,7 +455,7 FILETYPE_FILELOG_OTHER = FILEFLAGS_FILEL
455 FILETYPE_OTHER = FILEFLAGS_OTHER
455 FILETYPE_OTHER = FILEFLAGS_OTHER
456
456
457
457
458 class basicstore(object):
458 class basicstore:
459 '''base class for local repository stores'''
459 '''base class for local repository stores'''
460
460
461 def __init__(self, path, vfstype):
461 def __init__(self, path, vfstype):
@@ -601,7 +601,7 class encodedstore(basicstore):
601 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
601 return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
602
602
603
603
604 class fncache(object):
604 class fncache:
605 # the filename used to be partially encoded
605 # the filename used to be partially encoded
606 # hence the encodedir/decodedir dance
606 # hence the encodedir/decodedir dance
607 def __init__(self, vfs):
607 def __init__(self, vfs):
@@ -516,7 +516,7 def applybundlev1(repo, fp):
516 nodemap.post_stream_cleanup(repo)
516 nodemap.post_stream_cleanup(repo)
517
517
518
518
519 class streamcloneapplier(object):
519 class streamcloneapplier:
520 """Class to manage applying streaming clone bundles.
520 """Class to manage applying streaming clone bundles.
521
521
522 We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle
522 We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle
@@ -220,7 +220,7 def nullsubrepo(ctx, path, pctx):
220 # subrepo classes need to implement the following abstract class:
220 # subrepo classes need to implement the following abstract class:
221
221
222
222
223 class abstractsubrepo(object):
223 class abstractsubrepo:
224 def __init__(self, ctx, path):
224 def __init__(self, ctx, path):
225 """Initialize abstractsubrepo part
225 """Initialize abstractsubrepo part
226
226
@@ -684,7 +684,7 def _tag(
684 _fnodesmissingrec = b'\xff' * 24
684 _fnodesmissingrec = b'\xff' * 24
685
685
686
686
687 class hgtagsfnodescache(object):
687 class hgtagsfnodescache:
688 """Persistent cache mapping revisions to .hgtags filenodes.
688 """Persistent cache mapping revisions to .hgtags filenodes.
689
689
690 The cache is an array of records. Each item in the array corresponds to
690 The cache is an array of records. Each item in the array corresponds to
@@ -626,7 +626,7 def unquotestring(s):
626 return s[1:-1]
626 return s[1:-1]
627
627
628
628
629 class resourcemapper(object): # pytype: disable=ignored-metaclass
629 class resourcemapper: # pytype: disable=ignored-metaclass
630 """Mapper of internal template resources"""
630 """Mapper of internal template resources"""
631
631
632 __metaclass__ = abc.ABCMeta
632 __metaclass__ = abc.ABCMeta
@@ -663,7 +663,7 class nullresourcemapper(resourcemapper)
663 return {}
663 return {}
664
664
665
665
666 class engine(object):
666 class engine:
667 """template expansion engine.
667 """template expansion engine.
668
668
669 template expansion works like this. a map file contains key=value
669 template expansion works like this. a map file contains key=value
@@ -919,7 +919,7 def _readmapfile(fp, mapfile):
919 return cache, tmap, aliases
919 return cache, tmap, aliases
920
920
921
921
922 class loader(object):
922 class loader:
923 """Load template fragments optionally from a map file"""
923 """Load template fragments optionally from a map file"""
924
924
925 def __init__(self, cache, aliases):
925 def __init__(self, cache, aliases):
@@ -994,7 +994,7 class loader(object):
994 return syms
994 return syms
995
995
996
996
997 class templater(object):
997 class templater:
998 def __init__(
998 def __init__(
999 self,
999 self,
1000 filters=None,
1000 filters=None,
@@ -31,7 +31,7 class TemplateNotFound(error.Abort):
31 pass
31 pass
32
32
33
33
34 class wrapped(object): # pytype: disable=ignored-metaclass
34 class wrapped: # pytype: disable=ignored-metaclass
35 """Object requiring extra conversion prior to displaying or processing
35 """Object requiring extra conversion prior to displaying or processing
36 as value
36 as value
37
37
@@ -108,7 +108,7 class wrapped(object): # pytype: disabl
108 """
108 """
109
109
110
110
111 class mappable(object): # pytype: disable=ignored-metaclass
111 class mappable: # pytype: disable=ignored-metaclass
112 """Object which can be converted to a single template mapping"""
112 """Object which can be converted to a single template mapping"""
113
113
114 __metaclass__ = abc.ABCMeta
114 __metaclass__ = abc.ABCMeta
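The `__metaclass__ = abc.ABCMeta` assignment above is the Python 2 spelling; on Python 3 it is just an ordinary class attribute with no effect, which is what the `# pytype: disable=ignored-metaclass` comment is silencing. The Python 3 spelling passes the metaclass in the class header (the `tomap` method below is only illustrative):

    import abc

    class ignored:
        __metaclass__ = abc.ABCMeta          # plain attribute, no effect on py3

    class effective(metaclass=abc.ABCMeta):
        @abc.abstractmethod
        def tomap(self, context):
            ...

    assert type(ignored) is type
    assert type(effective) is abc.ABCMeta
    # effective() would raise TypeError: can't instantiate an abstract class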
@@ -169,7 +169,7 def _maybebytesurl(maybestr):
169 return pycompat.rapply(pycompat.bytesurl, maybestr)
169 return pycompat.rapply(pycompat.bytesurl, maybestr)
170
170
171
171
172 class httppasswordmgrdbproxy(object):
172 class httppasswordmgrdbproxy:
173 """Delays loading urllib2 until it's needed."""
173 """Delays loading urllib2 until it's needed."""
174
174
175 def __init__(self):
175 def __init__(self):
@@ -207,7 +207,7 def _catchterm(*args):
207 _reqexithandlers = []
207 _reqexithandlers = []
208
208
209
209
210 class ui(object):
210 class ui:
211 def __init__(self, src=None):
211 def __init__(self, src=None):
212 """Create a fresh new ui object if no src given
212 """Create a fresh new ui object if no src given
213
213
@@ -209,7 +209,7 class unionpeer(localrepo.localpeer):
209 return False
209 return False
210
210
211
211
212 class unionrepository(object):
212 class unionrepository:
213 """Represents the union of data in 2 repositories.
213 """Represents the union of data in 2 repositories.
214
214
215 Instances are not usable if constructed directly. Use ``instance()``
215 Instances are not usable if constructed directly. Use ``instance()``
@@ -45,7 +45,7 FORMAT_VARIANT = b'deficiency'
45 OPTIMISATION = b'optimization'
45 OPTIMISATION = b'optimization'
46
46
47
47
48 class improvement(object):
48 class improvement:
49 """Represents an improvement that can be made as part of an upgrade."""
49 """Represents an improvement that can be made as part of an upgrade."""
50
50
51 ### The following attributes should be defined for each subclass:
51 ### The following attributes should be defined for each subclass:
@@ -684,7 +684,7 def determine_upgrade_actions(
684 return newactions
684 return newactions
685
685
686
686
687 class UpgradeOperation(object):
687 class UpgradeOperation:
688 """represent the work to be done during an upgrade"""
688 """represent the work to be done during an upgrade"""
689
689
690 def __init__(
690 def __init__(
@@ -50,7 +50,7 def escape(s, quote=None):
50 return s
50 return s
51
51
52
52
53 class passwordmgr(object):
53 class passwordmgr:
54 def __init__(self, ui, passwddb):
54 def __init__(self, ui, passwddb):
55 self.ui = ui
55 self.ui = ui
56 self.passwddb = passwddb
56 self.passwddb = passwddb
@@ -17,7 +17,7 from . import pycompat
17 _sysstr = pycompat.sysstr
17 _sysstr = pycompat.sysstr
18
18
19
19
20 class _pycompatstub(object):
20 class _pycompatstub:
21 def __init__(self):
21 def __init__(self):
22 self._aliases = {}
22 self._aliases = {}
23
23
@@ -231,7 +231,7 for k in DIGESTS_BY_STRENGTH:
231 assert k in DIGESTS
231 assert k in DIGESTS
232
232
233
233
234 class digester(object):
234 class digester:
235 """helper to compute digests.
235 """helper to compute digests.
236
236
237 This helper can be used to compute one or more digests given their name.
237 This helper can be used to compute one or more digests given their name.
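A minimal sketch of the idea the digester docstring describes, using hashlib directly; the real digester API (incremental updates, indexing by name) is not visible in this hunk:

    import hashlib

    def compute_digests(data, names=(b'md5', b'sha1')):
        out = {}
        for name in names:
            h = hashlib.new(name.decode('ascii'))
            h.update(data)
            out[name] = h.hexdigest().encode('ascii')
        return out

    digests = compute_digests(b'some bundle data')
    assert len(digests[b'sha1']) == 40   # a hex sha1 is 40 characters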
@@ -279,7 +279,7 class digester(object):
279 return None
279 return None
280
280
281
281
282 class digestchecker(object):
282 class digestchecker:
283 """file handle wrapper that additionally checks content against a given
283 """file handle wrapper that additionally checks content against a given
284 size and digests.
284 size and digests.
285
285
@@ -329,7 +329,7 except NameError:
329 _chunksize = 4096
329 _chunksize = 4096
330
330
331
331
332 class bufferedinputpipe(object):
332 class bufferedinputpipe:
333 """a manually buffered input pipe
333 """a manually buffered input pipe
334
334
335 Python will not let us use buffered IO and lazy reading with 'polling' at
335 Python will not let us use buffered IO and lazy reading with 'polling' at
@@ -457,7 +457,7 def mmapread(fp, size=None):
457 raise
457 raise
458
458
459
459
460 class fileobjectproxy(object):
460 class fileobjectproxy:
461 """A proxy around file objects that tells a watcher when events occur.
461 """A proxy around file objects that tells a watcher when events occur.
462
462
463 This type is intended to only be used for testing purposes. Think hard
463 This type is intended to only be used for testing purposes. Think hard
@@ -693,7 +693,7 PROXIED_SOCKET_METHODS = {
693 }
693 }
694
694
695
695
696 class socketproxy(object):
696 class socketproxy:
697 """A proxy around a socket that tells a watcher when events occur.
697 """A proxy around a socket that tells a watcher when events occur.
698
698
699 This is like ``fileobjectproxy`` except for sockets.
699 This is like ``fileobjectproxy`` except for sockets.
@@ -816,7 +816,7 class socketproxy(object):
816 )
816 )
817
817
818
818
819 class baseproxyobserver(object):
819 class baseproxyobserver:
820 def __init__(self, fh, name, logdata, logdataapis):
820 def __init__(self, fh, name, logdata, logdataapis):
821 self.fh = fh
821 self.fh = fh
822 self.name = name
822 self.name = name
@@ -1256,7 +1256,7 def cachefunc(func):
1256 return f
1256 return f
1257
1257
1258
1258
1259 class cow(object):
1259 class cow:
1260 """helper class to make copy-on-write easier
1260 """helper class to make copy-on-write easier
1261
1261
1262 Call preparewrite before doing any writes.
1262 Call preparewrite before doing any writes.
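A rough, generic sketch of the copy-on-write contract named above (call preparewrite() before mutating); the real cow class and its subclasses are only partially visible here:

    class cowdict(dict):
        _copied = True

        def copy(self):
            # hand out the same object, but remember that it is now shared
            self._copied = False
            return self

        def preparewrite(self):
            # safe to mutate in place only while we are not shared
            if self._copied:
                return self
            new = cowdict(self)
            new._copied = True
            return new

    d1 = cowdict(a=1)
    d2 = d1.copy()            # no data copied yet
    d2 = d2.preparewrite()    # now a private copy
    d2['b'] = 2
    assert 'b' not in d1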
@@ -1349,7 +1349,7 class cowsortdict(cow, sortdict):
1349 """
1349 """
1350
1350
1351
1351
1352 class transactional(object): # pytype: disable=ignored-metaclass
1352 class transactional: # pytype: disable=ignored-metaclass
1353 """Base class for making a transactional type into a context manager."""
1353 """Base class for making a transactional type into a context manager."""
1354
1354
1355 __metaclass__ = abc.ABCMeta
1355 __metaclass__ = abc.ABCMeta
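The transactional base class above turns transaction-style objects into context managers. A sketch of that pattern under assumed method names (close/release), written with abc.ABC instead of the py2-style __metaclass__ assignment:

    import abc

    class transactional(abc.ABC):
        @abc.abstractmethod
        def close(self):
            ...

        @abc.abstractmethod
        def release(self):
            ...

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            try:
                # commit only on a clean exit
                if exc_type is None:
                    self.close()
            finally:
                self.release()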
@@ -1400,7 +1400,7 def nullcontextmanager(enter_result=None
1400 yield enter_result
1400 yield enter_result
1401
1401
1402
1402
1403 class _lrucachenode(object):
1403 class _lrucachenode:
1404 """A node in a doubly linked list.
1404 """A node in a doubly linked list.
1405
1405
1406 Holds a reference to nodes on either side as well as a key-value
1406 Holds a reference to nodes on either side as well as a key-value
@@ -1424,7 +1424,7 class _lrucachenode(object):
1424 self.cost = 0
1424 self.cost = 0
1425
1425
1426
1426
1427 class lrucachedict(object):
1427 class lrucachedict:
1428 """Dict that caches most recent accesses and sets.
1428 """Dict that caches most recent accesses and sets.
1429
1429
1430 The dict consists of an actual backing dict - indexed by original
1430 The dict consists of an actual backing dict - indexed by original
@@ -1755,7 +1755,7 def lrucachefunc(func):
1755 return f
1755 return f
1756
1756
1757
1757
1758 class propertycache(object):
1758 class propertycache:
1759 def __init__(self, func):
1759 def __init__(self, func):
1760 self.func = func
1760 self.func = func
1761 self.name = func.__name__
1761 self.name = func.__name__
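The two visible lines are the start of the classic cached-property descriptor pattern (a nearly identical propertycache appears in a later hunk, where the `obj.__dict__[self.name] = value` caching line is visible). A self-contained sketch of the pattern:

    class propertycache:
        def __init__(self, func):
            self.func = func
            self.name = func.__name__

        def __get__(self, obj, type=None):
            if obj is None:
                return self
            value = self.func(obj)
            # caching under the same name shadows this (non-data) descriptor,
            # so the function runs at most once per instance
            obj.__dict__[self.name] = value
            return value

    class repo:
        @propertycache
        def changelog(self):
            return object()

    r = repo()
    assert r.changelog is r.changelog   # second access hits the cache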
@@ -2214,7 +2214,7 except ImportError:
2214 _re2 = False
2214 _re2 = False
2215
2215
2216
2216
2217 class _re(object):
2217 class _re:
2218 def _checkre2(self):
2218 def _checkre2(self):
2219 global _re2
2219 global _re2
2220 global _re2_input
2220 global _re2_input
@@ -2416,7 +2416,7 def mktempcopy(name, emptyok=False, crea
2416 return temp
2416 return temp
2417
2417
2418
2418
2419 class filestat(object):
2419 class filestat:
2420 """help to exactly detect change of a file
2420 """help to exactly detect change of a file
2421
2421
2422 'stat' attribute is result of 'os.stat()' if specified 'path'
2422 'stat' attribute is result of 'os.stat()' if specified 'path'
@@ -2522,7 +2522,7 class filestat(object):
2522 return not self == other
2522 return not self == other
2523
2523
2524
2524
2525 class atomictempfile(object):
2525 class atomictempfile:
2526 """writable file object that atomically updates a file
2526 """writable file object that atomically updates a file
2527
2527
2528 All writes will go to a temporary copy of the original file. Call
2528 All writes will go to a temporary copy of the original file. Call
@@ -2665,7 +2665,7 def appendfile(path, text):
2665 fp.write(text)
2665 fp.write(text)
2666
2666
2667
2667
2668 class chunkbuffer(object):
2668 class chunkbuffer:
2669 """Allow arbitrary sized chunks of data to be efficiently read from an
2669 """Allow arbitrary sized chunks of data to be efficiently read from an
2670 iterator over chunks of arbitrary size."""
2670 iterator over chunks of arbitrary size."""
2671
2671
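A generic sketch of the behaviour the chunkbuffer docstring describes, re-chunking an iterator of arbitrarily sized byte strings into fixed-size reads; it is not the real implementation, which is cut off here:

    class rechunker:
        def __init__(self, gen):
            self.iter = iter(gen)
            self.buf = b''

        def read(self, size):
            # pull chunks from the iterator until the request can be satisfied
            while len(self.buf) < size:
                try:
                    self.buf += next(self.iter)
                except StopIteration:
                    break
            data, self.buf = self.buf[:size], self.buf[size:]
            return data

    r = rechunker([b'ab', b'cdef', b'g'])
    assert r.read(3) == b'abc'
    assert r.read(3) == b'def'
    assert r.read(3) == b'g'
    assert r.read(3) == b''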
@@ -2770,7 +2770,7 def filechunkiter(f, size=131072, limit=
2770 yield s
2770 yield s
2771
2771
2772
2772
2773 class cappedreader(object):
2773 class cappedreader:
2774 """A file object proxy that allows reading up to N bytes.
2774 """A file object proxy that allows reading up to N bytes.
2775
2775
2776 Given a source file object, instances of this type allow reading up to
2776 Given a source file object, instances of this type allow reading up to
@@ -2858,7 +2858,7 bytecount = unitcountfn(
2858 )
2858 )
2859
2859
2860
2860
2861 class transformingwriter(object):
2861 class transformingwriter:
2862 """Writable file wrapper to transform data by function"""
2862 """Writable file wrapper to transform data by function"""
2863
2863
2864 def __init__(self, fp, encode):
2864 def __init__(self, fp, encode):
@@ -2966,7 +2966,7 timecount = unitcountfn(
2966
2966
2967
2967
2968 @attr.s
2968 @attr.s
2969 class timedcmstats(object):
2969 class timedcmstats:
2970 """Stats information produced by the timedcm context manager on entering."""
2970 """Stats information produced by the timedcm context manager on entering."""
2971
2971
2972 # the starting value of the timer as a float (meaning and resolution is
2972 # the starting value of the timer as a float (meaning and resolution is
@@ -3067,7 +3067,7 def sizetoint(s):
3067 raise error.ParseError(_(b"couldn't parse size: %s") % s)
3067 raise error.ParseError(_(b"couldn't parse size: %s") % s)
3068
3068
3069
3069
3070 class hooks(object):
3070 class hooks:
3071 """A collection of hook functions that can be used to extend a
3071 """A collection of hook functions that can be used to extend a
3072 function's behavior. Hooks are called in lexicographic order,
3072 function's behavior. Hooks are called in lexicographic order,
3073 based on the names of their sources."""
3073 based on the names of their sources."""
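A small sketch of the behaviour the hooks docstring promises (callbacks run in lexicographic order of their source names); the add()/call signature below is an assumption, not necessarily the real API:

    class hooks:
        def __init__(self):
            self._hooks = []

        def add(self, source, hook):
            self._hooks.append((source, hook))

        def __call__(self, *args):
            # sort by source name so the call order is deterministic
            ordered = sorted(self._hooks, key=lambda e: e[0])
            return [hook(*args) for source, hook in ordered]

    h = hooks()
    h.add(b'b-ext', lambda x: x + 2)
    h.add(b'a-ext', lambda x: x + 1)
    assert h(10) == [11, 12]   # a-ext ran first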
@@ -483,7 +483,7 class bytestringchunk(bytes):
483 return self
483 return self
484
484
485
485
486 class sansiodecoder(object):
486 class sansiodecoder:
487 """A CBOR decoder that doesn't perform its own I/O.
487 """A CBOR decoder that doesn't perform its own I/O.
488
488
489 To use, construct an instance and feed it segments containing
489 To use, construct an instance and feed it segments containing
@@ -976,7 +976,7 class sansiodecoder(object):
976 return l
976 return l
977
977
978
978
979 class bufferingdecoder(object):
979 class bufferingdecoder:
980 """A CBOR decoder that buffers undecoded input.
980 """A CBOR decoder that buffers undecoded input.
981
981
982 This is a glorified wrapper around ``sansiodecoder`` that adds a buffering
982 This is a glorified wrapper around ``sansiodecoder`` that adds a buffering
@@ -32,7 +32,7 compewireprotosupport = collections.name
32 )
32 )
33
33
34
34
35 class propertycache(object):
35 class propertycache:
36 def __init__(self, func):
36 def __init__(self, func):
37 self.func = func
37 self.func = func
38 self.name = func.__name__
38 self.name = func.__name__
@@ -47,7 +47,7 class propertycache(object):
47 obj.__dict__[self.name] = value
47 obj.__dict__[self.name] = value
48
48
49
49
50 class compressormanager(object):
50 class compressormanager:
51 """Holds registrations of various compression engines.
51 """Holds registrations of various compression engines.
52
52
53 This class essentially abstracts the differences between compression
53 This class essentially abstracts the differences between compression
@@ -219,7 +219,7 class compressormanager(object):
219 compengines = compressormanager()
219 compengines = compressormanager()
220
220
221
221
222 class compressionengine(object):
222 class compressionengine:
223 """Base class for compression engines.
223 """Base class for compression engines.
224
224
225 Compression engines must implement the interface defined by this class.
225 Compression engines must implement the interface defined by this class.
@@ -338,7 +338,7 class compressionengine(object):
338 raise NotImplementedError()
338 raise NotImplementedError()
339
339
340
340
341 class _CompressedStreamReader(object):
341 class _CompressedStreamReader:
342 def __init__(self, fh):
342 def __init__(self, fh):
343 if safehasattr(fh, 'unbufferedread'):
343 if safehasattr(fh, 'unbufferedread'):
344 self._reader = fh.unbufferedread
344 self._reader = fh.unbufferedread
@@ -482,7 +482,7 class _zlibengine(compressionengine):
482 def decompressorreader(self, fh):
482 def decompressorreader(self, fh):
483 return _GzipCompressedStreamReader(fh)
483 return _GzipCompressedStreamReader(fh)
484
484
485 class zlibrevlogcompressor(object):
485 class zlibrevlogcompressor:
486 def __init__(self, level=None):
486 def __init__(self, level=None):
487 self._level = level
487 self._level = level
488
488
@@ -626,7 +626,7 class _noopengine(compressionengine):
626 def decompressorreader(self, fh):
626 def decompressorreader(self, fh):
627 return fh
627 return fh
628
628
629 class nooprevlogcompressor(object):
629 class nooprevlogcompressor:
630 def compress(self, data):
630 def compress(self, data):
631 return None
631 return None
632
632
@@ -698,7 +698,7 class _zstdengine(compressionengine):
698 def decompressorreader(self, fh):
698 def decompressorreader(self, fh):
699 return _ZstdCompressedStreamReader(fh, self._module)
699 return _ZstdCompressedStreamReader(fh, self._module)
700
700
701 class zstdrevlogcompressor(object):
701 class zstdrevlogcompressor:
702 def __init__(self, zstd, level=3):
702 def __init__(self, zstd, level=3):
703 # TODO consider omitting frame magic to save 4 bytes.
703 # TODO consider omitting frame magic to save 4 bytes.
704 # This writes content sizes into the frame header. That is
704 # This writes content sizes into the frame header. That is
@@ -782,7 +782,7 def bundlecompressiontopics():
782
782
783 # We need to format the docstring. So use a dummy object/type to hold it
783 # We need to format the docstring. So use a dummy object/type to hold it
784 # rather than mutating the original.
784 # rather than mutating the original.
785 class docobject(object):
785 class docobject:
786 pass
786 pass
787
787
788 for name in compengines:
788 for name in compengines:
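The comment above describes formatting a docstring onto a throwaway holder instead of mutating the original object; a generic illustration of that trick (the names below are made up):

    class docobject:
        pass

    def describe(engine_name, template):
        holder = docobject()
        # format onto the holder, leaving the original docstring untouched
        holder.__doc__ = template % engine_name
        return holder

    entry = describe('zstd', 'Compression engine: %s')
    assert entry.__doc__ == 'Compression engine: zstd'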
@@ -59,7 +59,7 class BadFile(io.RawIOBase):
59 raise IOError(errno.EBADF, 'Bad file descriptor')
59 raise IOError(errno.EBADF, 'Bad file descriptor')
60
60
61
61
62 class LineBufferedWrapper(object):
62 class LineBufferedWrapper:
63 def __init__(self, orig):
63 def __init__(self, orig):
64 self.orig = orig
64 self.orig = orig
65
65
@@ -98,7 +98,7 def unwrap_line_buffered(stream):
98 return stream
98 return stream
99
99
100
100
101 class WriteAllWrapper(object):
101 class WriteAllWrapper:
102 def __init__(self, orig):
102 def __init__(self, orig):
103 self.orig = orig
103 self.orig = orig
104
104
@@ -193,7 +193,7 def explainexit(code):
193 return _(b"killed by signal %d") % -code
193 return _(b"killed by signal %d") % -code
194
194
195
195
196 class _pfile(object):
196 class _pfile:
197 """File-like wrapper for a stream opened by subprocess.Popen()"""
197 """File-like wrapper for a stream opened by subprocess.Popen()"""
198
198
199 def __init__(self, proc, fp):
199 def __init__(self, proc, fp):
@@ -496,7 +496,7 def person(author):
496
496
497
497
498 @attr.s(hash=True)
498 @attr.s(hash=True)
499 class mailmapping(object):
499 class mailmapping:
500 """Represents a username/email key or value in
500 """Represents a username/email key or value in
501 a mailmap file"""
501 a mailmap file"""
502
502
@@ -54,7 +54,7 def getport(port):
54 )
54 )
55
55
56
56
57 class url(object):
57 class url:
58 r"""Reliable URL parser.
58 r"""Reliable URL parser.
59
59
60 This parses URLs and provides attributes for the following
60 This parses URLs and provides attributes for the following
@@ -832,7 +832,7 def _chain_path(base_path, ui, paths):
832 return new_paths
832 return new_paths
833
833
834
834
835 class path(object):
835 class path:
836 """Represents an individual path and its configuration."""
836 """Represents an individual path and its configuration."""
837
837
838 def __init__(
838 def __init__(
@@ -54,7 +54,7 WARN_NULLID_COPY_SOURCE = _(
54 )
54 )
55
55
56
56
57 class verifier(object):
57 class verifier:
58 def __init__(self, repo, level=None):
58 def __init__(self, repo, level=None):
59 self.repo = repo.unfiltered()
59 self.repo = repo.unfiltered()
60 self.ui = repo.ui
60 self.ui = repo.ui
@@ -46,7 +46,7 def _avoidambig(path, oldstat):
46 checkandavoid()
46 checkandavoid()
47
47
48
48
49 class abstractvfs(object):
49 class abstractvfs:
50 """Abstract base class; cannot be instantiated"""
50 """Abstract base class; cannot be instantiated"""
51
51
52 # default directory separator for vfs
52 # default directory separator for vfs
@@ -606,7 +606,7 class readonlyvfs(proxyvfs):
606 return self.vfs.join(path, *insidef)
606 return self.vfs.join(path, *insidef)
607
607
608
608
609 class closewrapbase(object):
609 class closewrapbase:
610 """Base class of wrapper, which hooks closing
610 """Base class of wrapper, which hooks closing
611
611
612 Do not instantiate outside of the vfs layer.
612 Do not instantiate outside of the vfs layer.
@@ -652,7 +652,7 class delayclosedfile(closewrapbase):
652 self._closer.close(self._origfh)
652 self._closer.close(self._origfh)
653
653
654
654
655 class backgroundfilecloser(object):
655 class backgroundfilecloser:
656 """Coordinates background closing of file handles on multiple threads."""
656 """Coordinates background closing of file handles on multiple threads."""
657
657
658 def __init__(self, ui, expectedcount=-1):
658 def __init__(self, ui, expectedcount=-1):
@@ -53,7 +53,7 unlink = win32.unlink
53 umask = 0o022
53 umask = 0o022
54
54
55
55
56 class mixedfilemodewrapper(object):
56 class mixedfilemodewrapper:
57 """Wraps a file handle when it is opened in read/write mode.
57 """Wraps a file handle when it is opened in read/write mode.
58
58
59 fopen() and fdopen() on Windows have a specific-to-Windows requirement
59 fopen() and fdopen() on Windows have a specific-to-Windows requirement
@@ -130,7 +130,7 class mixedfilemodewrapper(object):
130 return self._fp.readlines(*args, **kwargs)
130 return self._fp.readlines(*args, **kwargs)
131
131
132
132
133 class fdproxy(object):
133 class fdproxy:
134 """Wraps osutil.posixfile() to override the name attribute to reflect the
134 """Wraps osutil.posixfile() to override the name attribute to reflect the
135 underlying file name.
135 underlying file name.
136 """
136 """
@@ -214,7 +214,7 def get_password():
214 return encoding.unitolocal(pw)
214 return encoding.unitolocal(pw)
215
215
216
216
217 class winstdout(object):
217 class winstdout:
218 """Some files on Windows misbehave.
218 """Some files on Windows misbehave.
219
219
220 When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
220 When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
@@ -658,7 +658,7 def isexec(f):
658 return False
658 return False
659
659
660
660
661 class cachestat(object):
661 class cachestat:
662 def __init__(self, path):
662 def __init__(self, path):
663 pass
663 pass
664
664
@@ -134,7 +134,7 def humanflags(mapping, value):
134
134
135
135
136 @attr.s(slots=True)
136 @attr.s(slots=True)
137 class frameheader(object):
137 class frameheader:
138 """Represents the data in a frame header."""
138 """Represents the data in a frame header."""
139
139
140 length = attr.ib()
140 length = attr.ib()
@@ -146,7 +146,7 class frameheader(object):
146
146
147
147
148 @attr.s(slots=True, repr=False)
148 @attr.s(slots=True, repr=False)
149 class frame(object):
149 class frame:
150 """Represents a parsed frame."""
150 """Represents a parsed frame."""
151
151
152 requestid = attr.ib()
152 requestid = attr.ib()
@@ -589,7 +589,7 def createtextoutputframe(
589 )
589 )
590
590
591
591
592 class bufferingcommandresponseemitter(object):
592 class bufferingcommandresponseemitter:
593 """Helper object to emit command response frames intelligently.
593 """Helper object to emit command response frames intelligently.
594
594
595 Raw command response data is likely emitted in chunks much smaller
595 Raw command response data is likely emitted in chunks much smaller
@@ -699,7 +699,7 class bufferingcommandresponseemitter(ob
699 # mechanism.
699 # mechanism.
700
700
701
701
702 class identityencoder(object):
702 class identityencoder:
703 """Encoder for the "identity" stream encoding profile."""
703 """Encoder for the "identity" stream encoding profile."""
704
704
705 def __init__(self, ui):
705 def __init__(self, ui):
@@ -715,7 +715,7 class identityencoder(object):
715 return b''
715 return b''
716
716
717
717
718 class identitydecoder(object):
718 class identitydecoder:
719 """Decoder for the "identity" stream encoding profile."""
719 """Decoder for the "identity" stream encoding profile."""
720
720
721 def __init__(self, ui, extraobjs):
721 def __init__(self, ui, extraobjs):
@@ -728,7 +728,7 class identitydecoder(object):
728 return data
728 return data
729
729
730
730
731 class zlibencoder(object):
731 class zlibencoder:
732 def __init__(self, ui):
732 def __init__(self, ui):
733 import zlib
733 import zlib
734
734
@@ -749,7 +749,7 class zlibencoder(object):
749 return res
749 return res
750
750
751
751
752 class zlibdecoder(object):
752 class zlibdecoder:
753 def __init__(self, ui, extraobjs):
753 def __init__(self, ui, extraobjs):
754 import zlib
754 import zlib
755
755
@@ -764,7 +764,7 class zlibdecoder(object):
764 return self._decompressor.decompress(data)
764 return self._decompressor.decompress(data)
765
765
766
766
767 class zstdbaseencoder(object):
767 class zstdbaseencoder:
768 def __init__(self, level):
768 def __init__(self, level):
769 from . import zstd
769 from . import zstd
770
770
@@ -792,7 +792,7 class zstd8mbencoder(zstdbaseencoder):
792 super(zstd8mbencoder, self).__init__(3)
792 super(zstd8mbencoder, self).__init__(3)
793
793
794
794
795 class zstdbasedecoder(object):
795 class zstdbasedecoder:
796 def __init__(self, maxwindowsize):
796 def __init__(self, maxwindowsize):
797 from . import zstd
797 from . import zstd
798
798
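The `super(zstd8mbencoder, self).__init__(3)` call at the top of this hunk uses the explicit two-argument form; in Python 3-only code the zero-argument form is equivalent (this changeset only touches the class statements, not the super() calls). The class bodies below are made up; only the call form matters:

    class zstdbaseencoder:
        def __init__(self, level):
            self.level = level

    class zstd8mbencoder(zstdbaseencoder):
        def __init__(self):
            # equivalent to: super(zstd8mbencoder, self).__init__(3)
            super().__init__(3)

    assert zstd8mbencoder().level == 3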
@@ -842,7 +842,7 def populatestreamencoders():
842 STREAM_ENCODERS_ORDER.append(b'identity')
842 STREAM_ENCODERS_ORDER.append(b'identity')
843
843
844
844
845 class stream(object):
845 class stream:
846 """Represents a logical unidirectional series of frames."""
846 """Represents a logical unidirectional series of frames."""
847
847
848 def __init__(self, streamid, active=False):
848 def __init__(self, streamid, active=False):
@@ -995,7 +995,7 DEFAULT_PROTOCOL_SETTINGS = {
995 }
995 }
996
996
997
997
998 class serverreactor(object):
998 class serverreactor:
999 """Holds state of a server handling frame-based protocol requests.
999 """Holds state of a server handling frame-based protocol requests.
1000
1000
1001 This class is the "brain" of the unified frame-based protocol server
1001 This class is the "brain" of the unified frame-based protocol server
@@ -1683,7 +1683,7 class serverreactor(object):
1683 return self._makeerrorresult(_(b'server already errored'))
1683 return self._makeerrorresult(_(b'server already errored'))
1684
1684
1685
1685
1686 class commandrequest(object):
1686 class commandrequest:
1687 """Represents a request to run a command."""
1687 """Represents a request to run a command."""
1688
1688
1689 def __init__(self, requestid, name, args, datafh=None, redirect=None):
1689 def __init__(self, requestid, name, args, datafh=None, redirect=None):
@@ -1695,7 +1695,7 class commandrequest(object):
1695 self.state = b'pending'
1695 self.state = b'pending'
1696
1696
1697
1697
1698 class clientreactor(object):
1698 class clientreactor:
1699 """Holds state of a client issuing frame-based protocol requests.
1699 """Holds state of a client issuing frame-based protocol requests.
1700
1700
1701 This is like ``serverreactor`` but for client-side state.
1701 This is like ``serverreactor`` but for client-side state.
@@ -56,7 +56,7 def decodevaluefromheaders(req, headerpr
56
56
57
57
58 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
58 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
59 class httpv1protocolhandler(object):
59 class httpv1protocolhandler:
60 def __init__(self, req, ui, checkperm):
60 def __init__(self, req, ui, checkperm):
61 self._req = req
61 self._req = req
62 self._ui = ui
62 self._ui = ui
@@ -374,7 +374,7 def _sshv1respondooberror(fout, ferr, rs
374
374
375
375
376 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
376 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
377 class sshv1protocolhandler(object):
377 class sshv1protocolhandler:
378 """Handler for requests services via version 1 of SSH protocol."""
378 """Handler for requests services via version 1 of SSH protocol."""
379
379
380 def __init__(self, ui, fin, fout):
380 def __init__(self, ui, fin, fout):
@@ -520,7 +520,7 def _runsshserver(ui, repo, fin, fout, e
520 )
520 )
521
521
522
522
523 class sshserver(object):
523 class sshserver:
524 def __init__(self, ui, repo, logfh=None):
524 def __init__(self, ui, repo, logfh=None):
525 self._ui = ui
525 self._ui = ui
526 self._repo = repo
526 self._repo = repo
@@ -39,14 +39,14 TRANSPORTS = {
39 }
39 }
40
40
41
41
42 class bytesresponse(object):
42 class bytesresponse:
43 """A wire protocol response consisting of raw bytes."""
43 """A wire protocol response consisting of raw bytes."""
44
44
45 def __init__(self, data):
45 def __init__(self, data):
46 self.data = data
46 self.data = data
47
47
48
48
49 class ooberror(object):
49 class ooberror:
50 """wireproto reply: failure of a batch of operation
50 """wireproto reply: failure of a batch of operation
51
51
52 Something failed during a batch call. The error message is stored in
52 Something failed during a batch call. The error message is stored in
@@ -57,7 +57,7 class ooberror(object):
57 self.message = message
57 self.message = message
58
58
59
59
60 class pushres(object):
60 class pushres:
61 """wireproto reply: success with simple integer return
61 """wireproto reply: success with simple integer return
62
62
63 The call was successful and returned an integer contained in `self.res`.
63 The call was successful and returned an integer contained in `self.res`.
@@ -68,7 +68,7 class pushres(object):
68 self.output = output
68 self.output = output
69
69
70
70
71 class pusherr(object):
71 class pusherr:
72 """wireproto reply: failure
72 """wireproto reply: failure
73
73
74 The call failed. The `self.res` attribute contains the error message.
74 The call failed. The `self.res` attribute contains the error message.
@@ -79,7 +79,7 class pusherr(object):
79 self.output = output
79 self.output = output
80
80
81
81
82 class streamres(object):
82 class streamres:
83 """wireproto reply: binary stream
83 """wireproto reply: binary stream
84
84
85 The call was successful and the result is a stream.
85 The call was successful and the result is a stream.
@@ -96,7 +96,7 class streamres(object):
96 self.prefer_uncompressed = prefer_uncompressed
96 self.prefer_uncompressed = prefer_uncompressed
97
97
98
98
99 class streamreslegacy(object):
99 class streamreslegacy:
100 """wireproto reply: uncompressed binary stream
100 """wireproto reply: uncompressed binary stream
101
101
102 The call was successful and the result is a stream.
102 The call was successful and the result is a stream.
@@ -243,7 +243,7 class baseprotocolhandler(interfaceutil.
243 """
243 """
244
244
245
245
246 class commandentry(object):
246 class commandentry:
247 """Represents a declared wire protocol command."""
247 """Represents a declared wire protocol command."""
248
248
249 def __init__(
249 def __init__(
@@ -406,7 +406,7 def supportedcompengines(ui, role):
406
406
407
407
408 @attr.s
408 @attr.s
409 class encodedresponse(object):
409 class encodedresponse:
410 """Represents response data that is already content encoded.
410 """Represents response data that is already content encoded.
411
411
412 Wire protocol version 2 only.
412 Wire protocol version 2 only.
@@ -420,7 +420,7 class encodedresponse(object):
420
420
421
421
422 @attr.s
422 @attr.s
423 class alternatelocationresponse(object):
423 class alternatelocationresponse:
424 """Represents a response available at an alternate location.
424 """Represents a response available at an alternate location.
425
425
426 Instances are sent in place of actual response objects when the server
426 Instances are sent in place of actual response objects when the server
@@ -439,7 +439,7 class alternatelocationresponse(object):
439
439
440
440
441 @attr.s
441 @attr.s
442 class indefinitebytestringresponse(object):
442 class indefinitebytestringresponse:
443 """Represents an object to be encoded to an indefinite length bytestring.
443 """Represents an object to be encoded to an indefinite length bytestring.
444
444
445 Instances are initialized from an iterable of chunks, with each chunk being
445 Instances are initialized from an iterable of chunks, with each chunk being
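
Several of the response types above are plain attr.s-decorated containers. As a hedged illustration with hypothetical field names, the decorator generates __init__, __repr__, and __eq__ whether or not the class statement names an explicit base.

# Hedged illustration (hypothetical names): an attr.s class needs no explicit
# base; the decorator supplies the boilerplate either way.
import attr


@attr.s
class response:
    data = attr.ib()
    mediatype = attr.ib(default=b'application/octet-stream')


r = response(data=b'abc')
assert r.data == b'abc'
assert response(b'abc') == response(b'abc', b'application/octet-stream')
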
@@ -109,7 +109,7 class unsentfuture(futures.Future):
109
109
110
110
111 @interfaceutil.implementer(repository.ipeercommandexecutor)
111 @interfaceutil.implementer(repository.ipeercommandexecutor)
112 class peerexecutor(object):
112 class peerexecutor:
113 def __init__(self, peer):
113 def __init__(self, peer):
114 self._peer = peer
114 self._peer = peer
115 self._sent = False
115 self._sent = False
@@ -68,7 +68,7 def ismainthread():
68 return threading.current_thread() == threading.main_thread()
68 return threading.current_thread() == threading.main_thread()
69
69
70
70
71 class _blockingreader(object):
71 class _blockingreader:
72 def __init__(self, wrapped):
72 def __init__(self, wrapped):
73 self._wrapped = wrapped
73 self._wrapped = wrapped
74
74
@@ -213,7 +213,7 def runcmd(cmd, env, cwd=None):
213 return p.returncode, out, err
213 return p.returncode, out, err
214
214
215
215
216 class hgcommand(object):
216 class hgcommand:
217 def __init__(self, cmd, env):
217 def __init__(self, cmd, env):
218 self.cmd = cmd
218 self.cmd = cmd
219 self.env = env
219 self.env = env
@@ -1528,7 +1528,7 except ImportError:
1528 # the cygwinccompiler package is not available on some Python
1528 # the cygwinccompiler package is not available on some Python
1529 # distributions like the ones from the optware project for Synology
1529 # distributions like the ones from the optware project for Synology
1530 # DiskStation boxes
1530 # DiskStation boxes
1531 class HackedMingw32CCompiler(object):
1531 class HackedMingw32CCompiler:
1532 pass
1532 pass
1533
1533
1534
1534
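
The comment in the hunk above describes defining an empty stand-in class when cygwinccompiler cannot be imported. A hedged, generic sketch of that optional-import stub pattern follows; the names are placeholders, not the actual setup code, and on Python 3.12+ (where distutils is gone) the fallback branch simply takes over.

# Hedged sketch of the optional-import stub pattern referenced above.
try:
    from distutils.cygwinccompiler import Mingw32CCompiler as _basecompiler
except ImportError:
    # Fall back to an empty stand-in so later subclassing still has a target.
    class _basecompiler:
        pass


class patchedcompiler(_basecompiler):
    pass
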
@@ -265,7 +265,7 def _parseasciigraph(text):
265 return dict(edges)
265 return dict(edges)
266
266
267
267
268 class simplefilectx(object):
268 class simplefilectx:
269 def __init__(self, path, data):
269 def __init__(self, path, data):
270 self._data = data
270 self._data = data
271 self._path = path
271 self._path = path
@@ -37,7 +37,7 class _httprequesthandler(httpserver.sim
37 sys.stderr.flush()
37 sys.stderr.flush()
38
38
39
39
40 class simplehttpservice(object):
40 class simplehttpservice:
41 def __init__(self, host, port):
41 def __init__(self, host, port):
42 self.address = (host, port)
42 self.address = (host, port)
43
43
@@ -16,7 +16,7 def parse_keqv_list(req, l):
16 return parsed
16 return parsed
17
17
18
18
19 class digestauthserver(object):
19 class digestauthserver:
20 def __init__(self):
20 def __init__(self):
21 self._user_hashes = {}
21 self._user_hashes = {}
22
22
@@ -2,7 +2,7 import os
2 import time
2 import time
3
3
4
4
5 class mocktime(object):
5 class mocktime:
6 def __init__(self, increment):
6 def __init__(self, increment):
7 self.time = 0
7 self.time = 0
8 self.increment = [float(s) for s in increment.split()]
8 self.increment = [float(s) for s in increment.split()]
@@ -171,7 +171,7 if osenvironb is None:
171 # Windows lacks os.environb, for instance. A proxy over the real thing
171 # Windows lacks os.environb, for instance. A proxy over the real thing
172 # instead of a copy allows the environment to be updated via bytes on
172 # instead of a copy allows the environment to be updated via bytes on
173 # all platforms.
173 # all platforms.
174 class environbytes(object):
174 class environbytes:
175 def __init__(self, strenv):
175 def __init__(self, strenv):
176 self.__len__ = strenv.__len__
176 self.__len__ = strenv.__len__
177 self.clear = strenv.clear
177 self.clear = strenv.clear
@@ -2983,7 +2983,7 def sorttests(testdescs, previoustimes,
2983 testdescs.sort(key=sortkey)
2983 testdescs.sort(key=sortkey)
2984
2984
2985
2985
2986 class TestRunner(object):
2986 class TestRunner:
2987 """Holds context for executing tests.
2987 """Holds context for executing tests.
2988
2988
2989 Tests rely on a lot of state. This object holds it for them.
2989 Tests rely on a lot of state. This object holds it for them.
@@ -70,7 +70,7 class simplestoreerror(error.StorageErro
70
70
71 @interfaceutil.implementer(repository.irevisiondelta)
71 @interfaceutil.implementer(repository.irevisiondelta)
72 @attr.s(slots=True)
72 @attr.s(slots=True)
73 class simplestorerevisiondelta(object):
73 class simplestorerevisiondelta:
74 node = attr.ib()
74 node = attr.ib()
75 p1node = attr.ib()
75 p1node = attr.ib()
76 p2node = attr.ib()
76 p2node = attr.ib()
@@ -84,14 +84,14 class simplestorerevisiondelta(object):
84
84
85 @interfaceutil.implementer(repository.iverifyproblem)
85 @interfaceutil.implementer(repository.iverifyproblem)
86 @attr.s(frozen=True)
86 @attr.s(frozen=True)
87 class simplefilestoreproblem(object):
87 class simplefilestoreproblem:
88 warning = attr.ib(default=None)
88 warning = attr.ib(default=None)
89 error = attr.ib(default=None)
89 error = attr.ib(default=None)
90 node = attr.ib(default=None)
90 node = attr.ib(default=None)
91
91
92
92
93 @interfaceutil.implementer(repository.ifilestorage)
93 @interfaceutil.implementer(repository.ifilestorage)
94 class filestorage(object):
94 class filestorage:
95 """Implements storage for a tracked path.
95 """Implements storage for a tracked path.
96
96
97 Data is stored in the VFS in a directory corresponding to the tracked
97 Data is stored in the VFS in a directory corresponding to the tracked
@@ -3,7 +3,7 from mercurial import pycompat
3 from hgext import absorb
3 from hgext import absorb
4
4
5
5
6 class simplefctx(object):
6 class simplefctx:
7 def __init__(self, content):
7 def __init__(self, content):
8 self.content = content
8 self.content = content
9
9
@@ -62,7 +62,7 def buildancestorsets(graph):
62 return ancs
62 return ancs
63
63
64
64
65 class naiveincrementalmissingancestors(object):
65 class naiveincrementalmissingancestors:
66 def __init__(self, ancs, bases):
66 def __init__(self, ancs, bases):
67 self.ancs = ancs
67 self.ancs = ancs
68 self.bases = set(bases)
68 self.bases = set(bases)
@@ -20,7 +20,7 def bprint(*bs):
20
20
21
21
22 # equivalent of repo.repository
22 # equivalent of repo.repository
23 class thing(object):
23 class thing:
24 def hello(self):
24 def hello(self):
25 return b"Ready."
25 return b"Ready."
26
26
@@ -107,7 +107,7 def unescapearg(escaped):
107 # server side
107 # server side
108
108
109 # equivalent of wireproto's global functions
109 # equivalent of wireproto's global functions
110 class server(object):
110 class server:
111 def __init__(self, local):
111 def __init__(self, local):
112 self.local = local
112 self.local = local
113
113
@@ -80,7 +80,7 def checkzobject(o, allowextra=False):
80
80
81
81
82 # Facilitates testing localpeer.
82 # Facilitates testing localpeer.
83 class dummyrepo(object):
83 class dummyrepo:
84 def __init__(self):
84 def __init__(self):
85 self.ui = uimod.ui()
85 self.ui = uimod.ui()
86 self._wanted_sidedata = set()
86 self._wanted_sidedata = set()
@@ -92,7 +92,7 class dummyrepo(object):
92 pass
92 pass
93
93
94
94
95 class dummyopener(object):
95 class dummyopener:
96 handlers = []
96 handlers = []
97
97
98
98
@@ -108,7 +108,7 class badpeer(httppeer.httppeer):
108 pass
108 pass
109
109
110
110
111 class dummypipe(object):
111 class dummypipe:
112 def close(self):
112 def close(self):
113 pass
113 pass
114
114
@@ -16,7 +16,7 def getid(wrapper):
16 wrappers = [genwrapper(i) for i in range(5)]
16 wrappers = [genwrapper(i) for i in range(5)]
17
17
18
18
19 class dummyclass(object):
19 class dummyclass:
20 def getstack(self):
20 def getstack(self):
21 return ['orig']
21 return ['orig']
22
22
@@ -67,7 +67,7 with wrap1:
67 print('context manager', dummy.getstack())
67 print('context manager', dummy.getstack())
68
68
69 # Wrap callable object which has no __name__
69 # Wrap callable object which has no __name__
70 class callableobj(object):
70 class callableobj:
71 def __call__(self):
71 def __call__(self):
72 return ['orig']
72 return ['orig']
73
73
@@ -169,7 +169,7 def testcopyfrom():
169 os.unlink(path2)
169 os.unlink(path2)
170
170
171
171
172 class fakefctx(object):
172 class fakefctx:
173 def __init__(self, node, path=None):
173 def __init__(self, node, path=None):
174 self._node = node
174 self._node = node
175 self._path = path
175 self._path = path
@@ -35,11 +35,11 if pycompat.ispy3:
35 xrange = range
35 xrange = range
36
36
37
37
38 class fakerepo(object):
38 class fakerepo:
39 def __init__(self):
39 def __init__(self):
40 self._filecache = {}
40 self._filecache = {}
41
41
42 class fakevfs(object):
42 class fakevfs:
43 def join(self, p):
43 def join(self, p):
44 return p
44 return p
45
45
@@ -12,7 +12,7 except ImportError:
12 sha1dc = None
12 sha1dc = None
13
13
14
14
15 class hashertestsbase(object):
15 class hashertestsbase:
16 def test_basic_hash(self):
16 def test_basic_hash(self):
17 h = self.hasher()
17 h = self.hasher()
18 h.update(b'foo')
18 h.update(b'foo')
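
The hunk above shows a hasher test base class that deliberately is not a TestCase itself. As a hedged sketch of how such a mixin is typically paired with unittest (the concrete subclass and digest-length check here are hypothetical), shared test methods only run through subclasses that also inherit TestCase.

# Hedged sketch of the mixin pattern: the base holds shared test methods but is
# not collected on its own; concrete subclasses supply the hash constructor.
import hashlib
import unittest


class hashermixin:
    hasher = None  # concrete subclasses plug in a hash constructor

    def test_basic_hash(self):
        h = self.hasher()
        h.update(b'foo')
        self.assertEqual(len(h.digest()), h.digest_size)


class sha1tests(hashermixin, unittest.TestCase):
    # staticmethod keeps the constructor from binding as an instance method
    hasher = staticmethod(hashlib.sha1)


if __name__ == '__main__':
    unittest.main()
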
@@ -35,7 +35,7 class lockwrapper(lock.lock):
35 return super(lockwrapper, self)._getpid() + self._pidoffset
35 return super(lockwrapper, self)._getpid() + self._pidoffset
36
36
37
37
38 class teststate(object):
38 class teststate:
39 def __init__(self, testcase, dir, pidoffset=0):
39 def __init__(self, testcase, dir, pidoffset=0):
40 self._testcase = testcase
40 self._testcase = testcase
41 self._acquirecalled = False
41 self._acquirecalled = False
@@ -74,7 +74,7 A_HUGE_MANIFEST = b''.join(
74 )
74 )
75
75
76
76
77 class basemanifesttests(object):
77 class basemanifesttests:
78 def parsemanifest(self, text):
78 def parsemanifest(self, text):
79 raise NotImplementedError('parsemanifest not implemented by test case')
79 raise NotImplementedError('parsemanifest not implemented by test case')
80
80
@@ -35,7 +35,7 from hgext.remotefilelog import (
35 )
35 )
36
36
37
37
38 class datapacktestsbase(object):
38 class datapacktestsbase:
39 def __init__(self, datapackreader, paramsavailable):
39 def __init__(self, datapackreader, paramsavailable):
40 self.datapackreader = datapackreader
40 self.datapackreader = datapackreader
41 self.paramsavailable = paramsavailable
41 self.paramsavailable = paramsavailable
@@ -19,7 +19,7 from mercurial.revlogutils import (
19 )
19 )
20
20
21
21
22 class _NoTransaction(object):
22 class _NoTransaction:
23 """transaction like object to update the nodemap outside a transaction"""
23 """transaction like object to update the nodemap outside a transaction"""
24
24
25 def __init__(self):
25 def __init__(self):
@@ -150,7 +150,7 def addgroupcopy(rlog, tr, destname=b'_d
150 code path, which is not covered by "appendrev" alone.
150 code path, which is not covered by "appendrev" alone.
151 """
151 """
152
152
153 class dummychangegroup(object):
153 class dummychangegroup:
154 @staticmethod
154 @staticmethod
155 def deltachunk(pnode):
155 def deltachunk(pnode):
156 pnode = pnode or rlog.nullid
156 pnode = pnode or rlog.nullid
@@ -31,12 +31,12 data_non_inlined = (
31 )
31 )
32
32
33
33
34 class fakechangelog(object):
34 class fakechangelog:
35 def __init__(self, idx):
35 def __init__(self, idx):
36 self.index = idx
36 self.index = idx
37
37
38
38
39 class fakerepo(object):
39 class fakerepo:
40 def __init__(self, idx):
40 def __init__(self, idx):
41 """Just make so that self.changelog.index is the given idx."""
41 """Just make so that self.changelog.index is the given idx."""
42 self.changelog = fakechangelog(idx)
42 self.changelog = fakechangelog(idx)
@@ -7,7 +7,7 from mercurial import (
7 )
7 )
8
8
9
9
10 class mockfile(object):
10 class mockfile:
11 def __init__(self, name, fs):
11 def __init__(self, name, fs):
12 self.name = name
12 self.name = name
13 self.fs = fs
13 self.fs = fs
@@ -25,7 +25,7 class mockfile(object):
25 return self.fs.contents[self.name]
25 return self.fs.contents[self.name]
26
26
27
27
28 class mockvfs(object):
28 class mockvfs:
29 def __init__(self):
29 def __init__(self):
30 self.contents = {}
30 self.contents = {}
31
31
@@ -38,12 +38,12 def mockserver(inbytes):
38 return wireprotoserver.sshserver(ui, repo)
38 return wireprotoserver.sshserver(ui, repo)
39
39
40
40
41 class mockrepo(object):
41 class mockrepo:
42 def __init__(self, ui):
42 def __init__(self, ui):
43 self.ui = ui
43 self.ui = ui
44
44
45
45
46 class mockui(object):
46 class mockui:
47 def __init__(self, inbytes):
47 def __init__(self, inbytes):
48 self.fin = io.BytesIO(inbytes)
48 self.fin = io.BytesIO(inbytes)
49 self.fout = io.BytesIO()
49 self.fout = io.BytesIO()
@@ -14,7 +14,7 from mercurial.utils import stringutil
14 stringio = util.stringio
14 stringio = util.stringio
15
15
16
16
17 class proto(object):
17 class proto:
18 def __init__(self, args):
18 def __init__(self, args):
19 self.args = args
19 self.args = args
20 self.name = 'dummyproto'
20 self.name = 'dummyproto'
@@ -76,7 +76,7 class clientpeer(wireprotov1peer.wirepee
76 return {b'name': mangle(name)}, unmangle
76 return {b'name': mangle(name)}, unmangle
77
77
78
78
79 class serverrepo(object):
79 class serverrepo:
80 def __init__(self, ui):
80 def __init__(self, ui):
81 self.ui = ui
81 self.ui = ui
82
82
@@ -90,7 +90,7 configitem(
90 )
90 )
91
91
92
92
93 class ConditionTracker(object):
93 class ConditionTracker:
94 def __init__(
94 def __init__(
95 self,
95 self,
96 close_after_recv_bytes,
96 close_after_recv_bytes,
@@ -256,7 +256,7 class ConditionTracker(object):
256
256
257
257
258 # We can't adjust __class__ on a socket instance. So we define a proxy type.
258 # We can't adjust __class__ on a socket instance. So we define a proxy type.
259 class socketproxy(object):
259 class socketproxy:
260 __slots__ = ('_orig', '_logfp', '_cond')
260 __slots__ = ('_orig', '_logfp', '_cond')
261
261
262 def __init__(self, obj, logfp, condition_tracked):
262 def __init__(self, obj, logfp, condition_tracked):
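
The comment in the hunk above explains why a proxy type is defined: __class__ cannot be reassigned on a socket instance. As a hedged, generic sketch of that idea (illustrative names, not Mercurial's proxy), unknown attribute lookups fall through __getattr__ to the wrapped object so the proxy is a drop-in stand-in while selected accesses can be observed.

# Hedged sketch of a forwarding proxy: __slots__ holds the wrapped object and a
# log sink; anything not found on the proxy is fetched from the original.
import io


class forwardingproxy:
    __slots__ = ('_orig', '_logfp')

    def __init__(self, obj, logfp):
        self._orig = obj
        self._logfp = logfp

    def __getattr__(self, name):
        self._logfp.write('access %s\n' % name)
        return getattr(self._orig, name)


log = io.StringIO()
proxy = forwardingproxy(b'payload', log)
assert proxy.upper() == b'PAYLOAD'
assert 'upper' in log.getvalue()
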
@@ -300,7 +300,7 class socketproxy(object):
300
300
301
301
302 # We can't adjust __class__ on socket._fileobject, so define a proxy.
302 # We can't adjust __class__ on socket._fileobject, so define a proxy.
303 class fileobjectproxy(object):
303 class fileobjectproxy:
304 __slots__ = ('_orig', '_logfp', '_cond')
304 __slots__ = ('_orig', '_logfp', '_cond')
305
305
306 def __init__(self, obj, logfp, condition_tracked):
306 def __init__(self, obj, logfp, condition_tracked):
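
Every hunk in this diff makes the same mechanical substitution, dropping the explicit object base from class statements. As a quick hedged sanity check (hypothetical class names), the two spellings are equivalent on Python 3.

# On Python 3 the explicit `object` base adds nothing: both spellings yield the
# same kind of class with the same MRO.
class implicitbase:
    pass


class explicitbase(object):
    pass


assert implicitbase.__bases__ == explicitbase.__bases__ == (object,)
assert implicitbase.__mro__[1:] == explicitbase.__mro__[1:] == (object,)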