py3: fix keyword arguments handling in hgext/remotefilelog/...
Pulkit Goyal
r40646:13d4ad8d default
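
Why the keys change: Mercurial's Python 3 source transformer rewrites bare string literals such as 'all' into bytes, while the keys of a **kwargs dict always arrive as native str on Python 3, so a lookup like opts.get('all') silently misses the key. The r'' prefix keeps the literal a native str, and pycompat.byteskwargs() / pycompat.strkwargs() convert whole dicts at API boundaries. A minimal illustration (assumes the source transformer is active; shown as plain Python 3 with an explicit b'' for clarity):

    opts = {'all': True}   # **kwargs keys are always native str on Python 3
    opts.get(b'all')       # what a bare 'all' literal compiles to -> None
    opts.get(r'all')       # r'' is left as a native str           -> True
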
@@ -151,6 +151,7 @@ from mercurial import (
     merge,
     node as nodemod,
     patch,
+    pycompat,
     registrar,
     repair,
     repoview,
@@ -272,7 +273,7 @@ def uisetup(ui):

     # Prevent 'hg manifest --all'
     def _manifest(orig, ui, repo, *args, **opts):
-        if (isenabled(repo) and opts.get('all')):
+        if (isenabled(repo) and opts.get(r'all')):
             raise error.Abort(_("--all is not supported in a shallow repo"))

         return orig(ui, repo, *args, **opts)
@@ -294,7 +295,7 @@ def uisetup(ui):
     extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)

 def cloneshallow(orig, ui, repo, *args, **opts):
-    if opts.get('shallow'):
+    if opts.get(r'shallow'):
         repos = []
         def pull_shallow(orig, self, *args, **kwargs):
             if not isenabled(self):
@@ -327,9 +328,9 @@ def cloneshallow(orig, ui, repo, *args, 
                 if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
                     opts = {}
                     if repo.includepattern:
-                        opts['includepattern'] = '\0'.join(repo.includepattern)
+                        opts[r'includepattern'] = '\0'.join(repo.includepattern)
                     if repo.excludepattern:
-                        opts['excludepattern'] = '\0'.join(repo.excludepattern)
+                        opts[r'excludepattern'] = '\0'.join(repo.excludepattern)
                     return remote._callstream('stream_out_shallow', **opts)
                 else:
                     return orig()
@@ -360,7 +361,7 @@ def cloneshallow(orig, ui, repo, *args, 
     try:
         orig(ui, repo, *args, **opts)
     finally:
-        if opts.get('shallow'):
+        if opts.get(r'shallow'):
             for r in repos:
                 if util.safehasattr(r, 'fileservice'):
                     r.fileservice.close()
@@ -888,19 +889,20 @@ def log(orig, ui, repo, *pats, **opts):
     if not isenabled(repo):
         return orig(ui, repo, *pats, **opts)

-    follow = opts.get('follow')
-    revs = opts.get('rev')
+    follow = opts.get(r'follow')
+    revs = opts.get(r'rev')
     if pats:
         # Force slowpath for non-follow patterns and follows that start from
         # non-working-copy-parent revs.
         if not follow or revs:
             # This forces the slowpath
-            opts['removed'] = True
+            opts[r'removed'] = True

         # If this is a non-follow log without any revs specified, recommend that
         # the user add -f to speed it up.
         if not follow and not revs:
-            match, pats = scmutil.matchandpats(repo['.'], pats, opts)
+            match, pats = scmutil.matchandpats(repo['.'], pats,
+                                               pycompat.byteskwargs(opts))
             isfile = not match.anypats()
             if isfile:
                 for file in match.files():
@@ -1104,6 +1106,7 @@ def prefetch(ui, repo, *pats, **opts):

     Return 0 on success.
     """
+    opts = pycompat.byteskwargs(opts)
     if not isenabled(repo):
         raise error.Abort(_("repo is not shallow"))
@@ -1121,15 +1124,15 @@ def prefetch(ui, repo, *pats, **opts):
     ('', 'packsonly', None, _('only repack packs (skip loose objects)'), None),
     ], _('hg repack [OPTIONS]'))
 def repack_(ui, repo, *pats, **opts):
-    if opts.get('background'):
-        repackmod.backgroundrepack(repo, incremental=opts.get('incremental'),
-                                   packsonly=opts.get('packsonly', False))
+    if opts.get(r'background'):
+        repackmod.backgroundrepack(repo, incremental=opts.get(r'incremental'),
+                                   packsonly=opts.get(r'packsonly', False))
         return

-    options = {'packsonly': opts.get('packsonly')}
+    options = {'packsonly': opts.get(r'packsonly')}

     try:
-        if opts.get('incremental'):
+        if opts.get(r'incremental'):
             repackmod.incrementalrepack(repo, options=options)
         else:
             repackmod.fullrepack(repo, options=options)
@@ -392,10 +392,10 @@ class baseunionstore(object):
         # throw a KeyError, try this many times with a full refresh between
         # attempts. A repack operation may have moved data from one store to
         # another while we were running.
-        self.numattempts = kwargs.get('numretries', 0) + 1
+        self.numattempts = kwargs.get(r'numretries', 0) + 1
         # If not-None, call this function on every retry and if the attempts are
         # exhausted.
-        self.retrylog = kwargs.get('retrylog', None)
+        self.retrylog = kwargs.get(r'retrylog', None)

     def markforrefresh(self):
         for store in self.stores:
@@ -36,12 +36,12 @@ class unioncontentstore(basestore.baseun
         super(unioncontentstore, self).__init__(*args, **kwargs)

         self.stores = args
-        self.writestore = kwargs.get('writestore')
+        self.writestore = kwargs.get(r'writestore')

         # If allowincomplete==True then the union store can return partial
         # delta chains, otherwise it will throw a KeyError if a full
         # deltachain can't be found.
-        self.allowincomplete = kwargs.get('allowincomplete', False)
+        self.allowincomplete = kwargs.get(r'allowincomplete', False)

     def get(self, name, node):
         """Fetches the full text revision contents of the given name+node pair.
@@ -28,7 +28,7 @@ from . import (
 )

 def debugremotefilelog(ui, path, **opts):
-    decompress = opts.get('decompress')
+    decompress = opts.get(r'decompress')

     size, firstnode, mapping = parsefileblob(path, decompress)

@@ -89,9 +89,9 @@ def buildtemprevlog(repo, file):

 def debugindex(orig, ui, repo, file_=None, **opts):
     """dump the contents of an index file"""
-    if (opts.get('changelog') or
-        opts.get('manifest') or
-        opts.get('dir') or
+    if (opts.get(r'changelog') or
+        opts.get(r'manifest') or
+        opts.get(r'dir') or
         not shallowutil.isenabled(repo) or
         not repo.shallowmatch(file_)):
         return orig(ui, repo, file_, **opts)
@@ -154,7 +154,7 @@ def debugindexdot(orig, ui, repo, file_)
     ui.write("}\n")

 def verifyremotefilelog(ui, path, **opts):
-    decompress = opts.get('decompress')
+    decompress = opts.get(r'decompress')

     for root, dirs, files in os.walk(path):
         for file in files:
@@ -213,13 +213,13 @@ def debugdatapack(ui, *paths, **opts):
             path = path[:path.index('.data')]
         ui.write("%s:\n" % path)
         dpack = datapack.datapack(path)
-        node = opts.get('node')
+        node = opts.get(r'node')
         if node:
             deltachain = dpack.getdeltachain('', bin(node))
             dumpdeltachain(ui, deltachain, **opts)
             return

-        if opts.get('long'):
+        if opts.get(r'long'):
             hashformatter = hex
             hashlen = 42
         else:
@@ -18,6 +18,7 @@ from mercurial.i18n import _
 from mercurial.node import bin, hex, nullid
 from mercurial import (
     error,
+    pycompat,
     revlog,
     sshpeer,
     util,
@@ -119,7 +120,7 @@ def peersetup(ui, peer):
         def _callstream(self, command, **opts):
             supertype = super(remotefilepeer, self)
             if not util.safehasattr(supertype, '_sendrequest'):
-                self._updatecallstreamopts(command, opts)
+                self._updatecallstreamopts(command, pycompat.byteskwargs(opts))
             return super(remotefilepeer, self)._callstream(command, **opts)

     peer.__class__ = remotefilepeer
@@ -11,12 +11,12 @@ class unionmetadatastore(basestore.baseu
         super(unionmetadatastore, self).__init__(*args, **kwargs)

         self.stores = args
-        self.writestore = kwargs.get('writestore')
+        self.writestore = kwargs.get(r'writestore')

         # If allowincomplete==True then the union store can return partial
         # ancestor lists, otherwise it will throw a KeyError if a full
         # history can't be found.
-        self.allowincomplete = kwargs.get('allowincomplete', False)
+        self.allowincomplete = kwargs.get(r'allowincomplete', False)

     def getancestors(self, name, node, known=None):
         """Returns as many ancestors as we're aware of.
@@ -15,6 +15,7 @@ from mercurial import (
     context,
     error,
     phases,
+    pycompat,
     util,
 )
 from . import shallowutil
@@ -218,11 +219,11 @@ class remotefilectx(context.filectx):
             return linknode

         commonlogkwargs = {
-            'revs': ' '.join([hex(cl.node(rev)) for rev in revs]),
-            'fnode': hex(fnode),
-            'filepath': path,
-            'user': shallowutil.getusername(repo.ui),
-            'reponame': shallowutil.getreponame(repo.ui),
+            r'revs': ' '.join([hex(cl.node(rev)) for rev in revs]),
+            r'fnode': hex(fnode),
+            r'filepath': path,
+            r'user': shallowutil.getusername(repo.ui),
+            r'reponame': shallowutil.getreponame(repo.ui),
         }

         repo.ui.log('linkrevfixup', 'adjusting linknode', **commonlogkwargs)
@@ -315,7 +316,7 @@ class remotefilectx(context.filectx):
         finally:
             elapsed = time.time() - start
             repo.ui.log('linkrevfixup', logmsg, elapsed=elapsed * 1000,
-                        **commonlogkwargs)
+                        **pycompat.strkwargs(commonlogkwargs))

     def _verifylinknode(self, revs, linknode):
         """
@@ -408,7 +409,7 @@ class remotefilectx(context.filectx):

     def annotate(self, *args, **kwargs):
         introctx = self
-        prefetchskip = kwargs.pop('prefetchskip', None)
+        prefetchskip = kwargs.pop(r'prefetchskip', None)
         if prefetchskip:
             # use introrev so prefetchskip can be accurately tested
             introrev = self.introrev()
@@ -146,7 +146,7 @@ def makechangegroup(orig, repo, outgoing
     try:
         # if serving, only send files the clients has patterns for
         if source == 'serve':
-            bundlecaps = kwargs.get('bundlecaps')
+            bundlecaps = kwargs.get(r'bundlecaps')
             includepattern = None
             excludepattern = None
             for cap in (bundlecaps or []):
@@ -105,7 +105,7 @@ def prefixkeys(dict, prefix):
 def reportpackmetrics(ui, prefix, *stores):
     dicts = [s.getmetrics() for s in stores]
     dict = prefixkeys(sumdicts(*dicts), prefix + '_')
-    ui.log(prefix + "_packsizes", "", **dict)
+    ui.log(prefix + "_packsizes", "", **pycompat.strkwargs(dict))

 def _parsepackmeta(metabuf):
     """parse datapack meta, bytes (<metadata-list>) -> dict
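
The strkwargs() calls in the last hunks go the other direction: a bytes-keyed dict cannot be **-expanded on Python 3 (TypeError: keywords must be strings), so it is converted back to str keys at the call site. A small self-contained illustration (the log() stand-in below is hypothetical, not Mercurial's ui.log):

    from mercurial import pycompat  # assumes Mercurial is importable

    def log(event, msg, **kwargs):             # hypothetical stand-in for ui.log
        return sorted(kwargs)

    metrics = {b'cachehits': 3, b'misses': 1}  # bytes keys, as built internally
    log('packsizes', '', **pycompat.strkwargs(metrics))  # ['cachehits', 'misses']
    # log('packsizes', '', **metrics)  # Python 3: TypeError: keywords must be strings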