@@ -151,6 +151,7 @@ from mercurial import (
     merge,
     node as nodemod,
     patch,
+    pycompat,
     registrar,
     repair,
     repoview,
@@ -272,7 +273,7 @@ def uisetup(ui):
 
     # Prevent 'hg manifest --all'
     def _manifest(orig, ui, repo, *args, **opts):
-        if (isenabled(repo) and opts.get('all')):
+        if (isenabled(repo) and opts.get(r'all')):
             raise error.Abort(_("--all is not supported in a shallow repo"))
 
         return orig(ui, repo, *args, **opts)
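
Note: the recurring opts.get('x') -> opts.get(r'x') substitutions throughout this change are a Python 3 keyword-argument fix. Mercurial's Python 3 source loader turns unprefixed string literals into bytes, but the keys Python stores in a **opts dict are always native str, so a bytes lookup silently returns None. The r'' prefix keeps the literal a native str under the loader. Below is a standalone sketch of the mismatch (illustrative only, with the bytes literal written out explicitly to stand in for the source transformer; these helper functions are not part of the patch):

    def broken(**opts):
        # Simulates what an unprefixed literal becomes under the py3 loader:
        # b'shallow' can never match the str key 'shallow' in **opts.
        return opts.get(b'shallow')

    def fixed(**opts):
        # r'shallow' stays a native str, matching the **opts key type.
        return opts.get(r'shallow')

    assert broken(shallow=True) is None   # lookup misses
    assert fixed(shallow=True) is True    # lookup works
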
@@ -294,7 +295,7 @@ def uisetup(ui):
     extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)
 
 def cloneshallow(orig, ui, repo, *args, **opts):
-    if opts.get('shallow'):
+    if opts.get(r'shallow'):
         repos = []
         def pull_shallow(orig, self, *args, **kwargs):
             if not isenabled(self):
@@ -327,9 +328,9 @@ def cloneshallow(orig, ui, repo, *args, 
                 if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
                     opts = {}
                     if repo.includepattern:
-                        opts['includepattern'] = '\0'.join(repo.includepattern)
+                        opts[r'includepattern'] = '\0'.join(repo.includepattern)
                     if repo.excludepattern:
-                        opts['excludepattern'] = '\0'.join(repo.excludepattern)
+                        opts[r'excludepattern'] = '\0'.join(repo.excludepattern)
                     return remote._callstream('stream_out_shallow', **opts)
                 else:
                     return orig()
@@ -360,7 +361,7 @@ def cloneshallow(orig, ui, repo, *args, 
     try:
         orig(ui, repo, *args, **opts)
     finally:
-        if opts.get('shallow'):
+        if opts.get(r'shallow'):
             for r in repos:
                 if util.safehasattr(r, 'fileservice'):
                     r.fileservice.close()
@@ -888,19 +889,20 @@ def log(orig, ui, repo, *pats, **opts):
     if not isenabled(repo):
         return orig(ui, repo, *pats, **opts)
 
-    follow = opts.get('follow')
-    revs = opts.get('rev')
+    follow = opts.get(r'follow')
+    revs = opts.get(r'rev')
     if pats:
         # Force slowpath for non-follow patterns and follows that start from
         # non-working-copy-parent revs.
         if not follow or revs:
             # This forces the slowpath
-            opts['removed'] = True
+            opts[r'removed'] = True
 
         # If this is a non-follow log without any revs specified, recommend that
         # the user add -f to speed it up.
         if not follow and not revs:
-            match, pats = scmutil.matchandpats(repo['.'], pats, opts)
+            match, pats = scmutil.matchandpats(repo['.'], pats,
+                                               pycompat.byteskwargs(opts))
             isfile = not match.anypats()
             if isfile:
                 for file in match.files():
@@ -1104,6 +1106,7 @@ def prefetch(ui, repo, *pats, **opts):
 
     Return 0 on success.
     """
+    opts = pycompat.byteskwargs(opts)
     if not isenabled(repo):
         raise error.Abort(_("repo is not shallow"))
 
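
The opts = pycompat.byteskwargs(opts) line added to prefetch above (and the byteskwargs() wrapping of opts in log) converts the native-str keys received through **opts back into the bytes keys that the rest of Mercurial expects when it indexes opts with ordinary string literals. Roughly, the helper behaves like this simplified sketch (the real implementation lives in mercurial/pycompat.py; this version is for illustration only):

    def byteskwargs(dic):
        # Re-encode each str key to bytes so downstream code that does
        # opts['rev'] / opts['follow'] with bytes literals keeps working.
        return dict((k.encode('latin-1'), v) for k, v in dic.items())

    opts = {'rev': ['tip'], 'follow': True}   # shape of **opts on Python 3
    byteopts = byteskwargs(opts)
    assert byteopts[b'rev'] == ['tip']
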
@@ -1121,15 +1124,15 @@ def prefetch(ui, repo, *pats, **opts):
     ('', 'packsonly', None, _('only repack packs (skip loose objects)'), None),
    ], _('hg repack [OPTIONS]'))
 def repack_(ui, repo, *pats, **opts):
-    if opts.get('background'):
-        repackmod.backgroundrepack(repo, incremental=opts.get('incremental'),
-                                   packsonly=opts.get('packsonly', False))
+    if opts.get(r'background'):
+        repackmod.backgroundrepack(repo, incremental=opts.get(r'incremental'),
+                                   packsonly=opts.get(r'packsonly', False))
         return
 
-    options = {'packsonly': opts.get('packsonly')}
+    options = {'packsonly': opts.get(r'packsonly')}
 
     try:
-        if opts.get('incremental'):
+        if opts.get(r'incremental'):
             repackmod.incrementalrepack(repo, options=options)
         else:
             repackmod.fullrepack(repo, options=options)
@@ -392,10 +392,10 @@ class baseunionstore(object):
         # throw a KeyError, try this many times with a full refresh between
         # attempts. A repack operation may have moved data from one store to
         # another while we were running.
-        self.numattempts = kwargs.get('numretries', 0) + 1
+        self.numattempts = kwargs.get(r'numretries', 0) + 1
         # If not-None, call this function on every retry and if the attempts are
         # exhausted.
-        self.retrylog = kwargs.get('retrylog', None)
+        self.retrylog = kwargs.get(r'retrylog', None)
 
     def markforrefresh(self):
         for store in self.stores:
@@ -36,12 +36,12 @@ class unioncontentstore(basestore.baseun
         super(unioncontentstore, self).__init__(*args, **kwargs)
 
         self.stores = args
-        self.writestore = kwargs.get('writestore')
+        self.writestore = kwargs.get(r'writestore')
 
         # If allowincomplete==True then the union store can return partial
         # delta chains, otherwise it will throw a KeyError if a full
         # deltachain can't be found.
-        self.allowincomplete = kwargs.get('allowincomplete', False)
+        self.allowincomplete = kwargs.get(r'allowincomplete', False)
 
     def get(self, name, node):
         """Fetches the full text revision contents of the given name+node pair.
@@ -28,7 +28,7 @@ from . import (
 )
 
 def debugremotefilelog(ui, path, **opts):
-    decompress = opts.get('decompress')
+    decompress = opts.get(r'decompress')
 
     size, firstnode, mapping = parsefileblob(path, decompress)
 
@@ -89,9 +89,9 @@ def buildtemprevlog(repo, file):
 
 def debugindex(orig, ui, repo, file_=None, **opts):
     """dump the contents of an index file"""
-    if (opts.get('changelog') or
-        opts.get('manifest') or
-        opts.get('dir') or
+    if (opts.get(r'changelog') or
+        opts.get(r'manifest') or
+        opts.get(r'dir') or
         not shallowutil.isenabled(repo) or
         not repo.shallowmatch(file_)):
         return orig(ui, repo, file_, **opts)
@@ -154,7 +154,7 @@ def debugindexdot(orig, ui, repo, file_)
     ui.write("}\n")
 
 def verifyremotefilelog(ui, path, **opts):
-    decompress = opts.get('decompress')
+    decompress = opts.get(r'decompress')
 
     for root, dirs, files in os.walk(path):
         for file in files:
@@ -213,13 +213,13 @@ def debugdatapack(ui, *paths, **opts):
             path = path[:path.index('.data')]
         ui.write("%s:\n" % path)
         dpack = datapack.datapack(path)
-        node = opts.get('node')
+        node = opts.get(r'node')
         if node:
             deltachain = dpack.getdeltachain('', bin(node))
             dumpdeltachain(ui, deltachain, **opts)
             return
 
-        if opts.get('long'):
+        if opts.get(r'long'):
             hashformatter = hex
             hashlen = 42
         else:
@@ -18,6 +18,7 @@ from mercurial.i18n import _
 from mercurial.node import bin, hex, nullid
 from mercurial import (
     error,
+    pycompat,
     revlog,
     sshpeer,
     util,
@@ -119,7 +120,7 @@ def peersetup(ui, peer):
         def _callstream(self, command, **opts):
             supertype = super(remotefilepeer, self)
             if not util.safehasattr(supertype, '_sendrequest'):
-                self._updatecallstreamopts(command, opts)
+                self._updatecallstreamopts(command, pycompat.byteskwargs(opts))
             return super(remotefilepeer, self)._callstream(command, **opts)
 
     peer.__class__ = remotefilepeer
@@ -11,12 +11,12 @@ class unionmetadatastore(basestore.baseu
         super(unionmetadatastore, self).__init__(*args, **kwargs)
 
         self.stores = args
-        self.writestore = kwargs.get('writestore')
+        self.writestore = kwargs.get(r'writestore')
 
         # If allowincomplete==True then the union store can return partial
         # ancestor lists, otherwise it will throw a KeyError if a full
         # history can't be found.
-        self.allowincomplete = kwargs.get('allowincomplete', False)
+        self.allowincomplete = kwargs.get(r'allowincomplete', False)
 
     def getancestors(self, name, node, known=None):
         """Returns as many ancestors as we're aware of.
@@ -15,6 +15,7 @@ from mercurial import (
     context,
     error,
     phases,
+    pycompat,
     util,
 )
 from . import shallowutil
@@ -218,11 +219,11 @@ class remotefilectx(context.filectx):
             return linknode
 
         commonlogkwargs = {
-            'revs': ' '.join([hex(cl.node(rev)) for rev in revs]),
-            'fnode': hex(fnode),
-            'filepath': path,
-            'user': shallowutil.getusername(repo.ui),
-            'reponame': shallowutil.getreponame(repo.ui),
+            r'revs': ' '.join([hex(cl.node(rev)) for rev in revs]),
+            r'fnode': hex(fnode),
+            r'filepath': path,
+            r'user': shallowutil.getusername(repo.ui),
+            r'reponame': shallowutil.getreponame(repo.ui),
         }
 
         repo.ui.log('linkrevfixup', 'adjusting linknode', **commonlogkwargs)
@@ -315,7 +316,7 @@ class remotefilectx(context.filectx):
         finally:
             elapsed = time.time() - start
             repo.ui.log('linkrevfixup', logmsg, elapsed=elapsed * 1000,
-                        **commonlogkwargs)
+                        **pycompat.strkwargs(commonlogkwargs))
 
     def _verifylinknode(self, revs, linknode):
         """
@@ -408,7 +409,7 @@ class remotefilectx(context.filectx):
 
     def annotate(self, *args, **kwargs):
         introctx = self
-        prefetchskip = kwargs.pop('prefetchskip', None)
+        prefetchskip = kwargs.pop(r'prefetchskip', None)
         if prefetchskip:
             # use introrev so prefetchskip can be accurately tested
             introrev = self.introrev()
@@ -146,7 +146,7 @@ def makechangegroup(orig, repo, outgoing
     try:
         # if serving, only send files the clients has patterns for
         if source == 'serve':
-            bundlecaps = kwargs.get('bundlecaps')
+            bundlecaps = kwargs.get(r'bundlecaps')
             includepattern = None
             excludepattern = None
             for cap in (bundlecaps or []):
@@ -105,7 +105,7 @@ def prefixkeys(dict, prefix):
 def reportpackmetrics(ui, prefix, *stores):
     dicts = [s.getmetrics() for s in stores]
     dict = prefixkeys(sumdicts(*dicts), prefix + '_')
-    ui.log(prefix + "_packsizes", "", **dict)
+    ui.log(prefix + "_packsizes", "", **pycompat.strkwargs(dict))
 
 def _parsepackmeta(metabuf):
     """parse datapack meta, bytes (<metadata-list>) -> dict