@@ -1379,7 +1379,7 b' def perfmanifest(ui, repo, rev, manifest'
    t = repo.manifestlog._revlog.lookup(rev)
except ValueError:
    raise error.Abort(
        b'manifest revision must be integer or full node'
    )

def d():
@@ -439,7 +439,7 b' def _txnhook(ui, repo, hooktype, node, s'
branch = ctx.branch()
if denybranches and denybranches(branch):
    raise error.Abort(
        _(b'acl: user "%s" denied on branch "%s" (changeset "%s")')
        % (user, branch, ctx)
    )
if allowbranches and not allowbranches(branch):
@@ -457,7 +457,7 b' def _txnhook(ui, repo, hooktype, node, s'
for f in ctx.files():
    if deny and deny(f):
        raise error.Abort(
            _(b'acl: user "%s" denied on "%s" (changeset "%s")')
            % (user, f, ctx)
        )
    if allow and not allow(f):
@@ -169,7 +169,7 b' class converter_source(object):'
"""
if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
    raise error.Abort(
        _(b'%s entry %s is not a valid revision identifier')
        % (mapname, revstr)
    )

@@ -124,7 +124,7 b' def createlog(ui, directory=None, root=b'
re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
re_03 = re.compile(
    b"(Cannot access.+CVSROOT)|(can't create temporary directory.+)$"
)
re_10 = re.compile(b'Working file: (.+)$')
re_20 = re.compile(b'symbolic names:')
@@ -328,7 +328,7 b' def createlog(ui, directory=None, root=b'
        state = 5
    else:
        assert not re_32.match(line), _(
            b'must have at least some revisions'
        )

elif state == 5:
@@ -563,7 +563,7 b' def createlog(ui, directory=None, root=b'
    raise error.Abort(
        inst,
        hint=_(
            b'check convert.cvsps.logencoding configuration'
        ),
    )
else:
@@ -573,9 +573,7 b' def createlog(ui, directory=None, root=b'
            b" CVS log message for %s of %s"
        )
        % (revstr(entry.revision), entry.file),
        hint=_(b'check convert.cvsps.logencoding configuration'),
    )

hook.hook(ui, None, b"cvslog", True, log=log)
@@ -259,7 +259,7 b' class convert_git(common.converter_sourc'
# This can happen if a file is in the repo that has permissions
# 160000, but there is no .gitmodules file.
self.ui.warn(
    _(b"warning: cannot read submodules config file in %s\n")
    % version
)
return
@@ -263,7 +263,7 b' class monotone_source(common.converter_s'
def getchanges(self, rev, full):
    if full:
        raise error.Abort(
            _(b"convert from monotone does not support --full")
        )
    revision = self.mtnrun(b"get_revision", rev).split(b"\n\n")
    files = {}
@@ -369,7 +369,7 b' class monotone_source(common.converter_s'
    version = float(versionstr)
except Exception:
    raise error.Abort(
        _(b"unable to determine mtn automate interface version")
    )

if version >= 12.0:
@@ -189,7 +189,7 b' def debugsvnlog(ui, **opts):'
"""
if svn is None:
    raise error.Abort(
        _(b'debugsvnlog could not load Subversion python bindings')
    )

args = decodeargs(ui.fin.read())
@@ -647,7 +647,7 b' class svn_source(converter_source):'
    revstr,
):
    raise error.Abort(
        _(b'%s entry %s is not a valid revision identifier')
        % (mapname, revstr)
    )

@@ -460,7 +460,7 b' def reposetup(ui, repo):'
        continue
    if inconsistenteol(data):
        raise errormod.Abort(
            _(b"inconsistent newline style in %s\n") % f
        )
return super(eolrepo, self).commitctx(ctx, error, origctx)

@@ -93,7 +93,7 b' fastannotatecommandargs = {'
    b'l',
    b'line-number',
    None,
    _(b'show line number at the first appearance'),
),
(
    b'e',
@@ -127,7 +127,7 b' fastannotatecommandargs = {'
        b'',
        b'rebuild',
        None,
        _(b'rebuild cache even if it exists (EXPERIMENTAL)'),
    ),
]
+ commands.diffwsopts
@@ -116,7 +116,7 b' def _fctxannotate('
    return _doannotate(self, follow, diffopts)
except Exception as ex:
    self._repo.ui.debug(
        b'fastannotate: falling back to the vanilla annotate: %r\n' % ex
    )
    return orig(self, follow=follow, skiprevs=skiprevs, diffopts=diffopts)

@@ -188,7 +188,7 b" def fetch(ui, repo, source=b'default', *"
    message, opts[b'user'], opts[b'date'], editor=editor
)
ui.status(
    _(b'new changeset %d:%s merges remote changes with local\n')
    % (repo.changelog.rev(n), short(n))
)

@@ -423,7 +423,7 b' def checknodescendants(repo, revs):'
    b'(%ld::) - (%ld)', revs, revs
):
    raise error.Abort(
        _(b'can only fix a changeset together with all its descendants')
    )


@@ -66,7 +66,7 b' def githelp(ui, repo, *args, **kwargs):'

if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
    raise error.Abort(
        _(b'missing git command - usage: hg githelp -- <git command>')
    )

if args[0] == b'git':
@@ -76,7 +76,7 b' class gpg(object):'
fp = os.fdopen(fd, r'wb')
fp.write(data)
fp.close()
gpgcmd = b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\"" % (
    self.path,
    sigfile,
    datafile,
@@ -179,12 +179,12 b' def getkeys(ui, repo, mygpg, sigdata, co'
    continue
if key[0] == b"EXPSIG":
    ui.write(
        _(b"%s Note: Signature has expired (signed by: \"%s\")\n")
        % (prefix, key[2])
    )
elif key[0] == b"EXPKEYSIG":
    ui.write(
        _(b"%s Note: This key has expired (signed by: \"%s\")\n")
        % (prefix, key[2])
    )
validkeys.append((key[1], key[2], key[3]))
@@ -304,7 +304,7 b' def _dosign(ui, repo, *revs, **opts):'
]
if len(nodes) > 1:
    raise error.Abort(
        _(b'uncommitted merge - please provide a specific revision')
    )
if not nodes:
    nodes = [repo.changelog.tip()]
@@ -39,7 +39,7 b' TextLexer = pygments.lexers.TextLexer'
HtmlFormatter = pygments.formatters.HtmlFormatter

SYNTAX_CSS = (
    b'\n<link rel="stylesheet" href="{url}highlightcss" type="text/css" />'
)


@@ -1884,7 +1884,7 b' def _validateargs(ui, repo, state, freea'
elif goal == b'edit-plan':
    if any((outg, revs, freeargs)):
        raise error.Abort(
            _(b'only --commands argument allowed with --edit-plan')
        )
else:
    if state.inprogress():
@@ -70,7 +70,7 b' class sqlindexapi(indexapi.indexapi):'
    raise indexapi.indexexception(b"SQL connection already open")
if self.sqlcursor:
    raise indexapi.indexexception(
        b"SQL cursor already open without connection"
    )
retry = 3
while True:
@@ -126,7 +126,7 b' class sqlindexapi(indexapi.indexapi):'
self.sqlconnect()
self.log.info(b"ADD BUNDLE %r %r" % (self.reponame, bundleid))
self.sqlcursor.execute(
    b"INSERT INTO bundles(bundle, reponame) VALUES (%s, %s)",
    params=(bundleid, self.reponame),
)
for ctx in nodesctx:
@@ -593,7 +593,7 b' class queue(object):'
    diffopts.git = self.gitmode == b'yes'
else:
    raise error.Abort(
        _(b'mq.git option can be auto/keep/yes/no got %s')
        % self.gitmode
    )
if patchfn:
@@ -1113,7 +1113,7 b' class queue(object):'

if patcherr:
    self.ui.warn(
        _(b"patch failed, rejects left in working directory\n")
    )
    err = 2
    break
@@ -1214,7 +1214,7 b' class queue(object):'
def delete(self, repo, patches, opts):
    if not patches and not opts.get(b'rev'):
        raise error.Abort(
            _(b'qdelete requires at least one revision or patch name')
        )

    realpatches = []
@@ -1295,7 +1295,7 b' class queue(object):'
if name != name.strip():
    # whitespace is stripped by parseseries()
    raise error.Abort(
        _(b'patch name cannot begin or end with whitespace')
    )
for prefix in (b'.hg', b'.mq'):
    if name.startswith(prefix):
@@ -1629,11 +1629,11 b' class queue(object):'
    )
if move:
    raise error.Abort(
        _(b'cannot use --exact and --move together')
    )
if self.applied:
    raise error.Abort(
        _(b'cannot push --exact with applied patches')
    )
root = self.series[start]
target = patchheader(self.join(root), self.plainmode).parent
@@ -2390,7 +2390,7 b' class queue(object):'
if rev:
    if files:
        raise error.Abort(
            _(b'option "-r" not valid when importing files')
        )
    rev = scmutil.revrange(repo, rev)
    rev.sort(reverse=True)
@@ -2398,7 +2398,7 b' class queue(object):'
    raise error.Abort(_(b'no files or revisions specified'))
if (len(files) > 1 or len(rev) > 1) and patchname:
    raise error.Abort(
        _(b'option "-n" not valid when importing multiple patches')
    )
imported = []
if rev:
@@ -2408,7 +2408,7 b' class queue(object):'
heads = repo.changelog.heads(repo.changelog.node(rev.first()))
if len(heads) > 1:
    raise error.Abort(
        _(b'revision %d is the root of more than one branch')
        % rev.last()
    )
if self.applied:
@@ -2419,7 +2419,7 b' class queue(object):'
    )
if heads != [self.applied[-1].node]:
    raise error.Abort(
        _(b'revision %d is not the parent of the queue')
        % rev.first()
    )
base = repo.changelog.rev(self.applied[0].node)
@@ -2447,7 +2447,7 b' class queue(object):'
    )
if lastparent and lastparent != r:
    raise error.Abort(
        _(b'revision %d is not the parent of %d')
        % (r, lastparent)
    )
lastparent = p1
@@ -2849,7 +2849,7 b' def clone(ui, source, dest=None, **opts)'
    hg.peer(ui, opts, patchespath)
except error.RepoError:
    raise error.Abort(
        _(b'versioned patch repository not found (see init --mq)')
    )
qbase, destrev = None, None
if sr.local():
@@ -3359,7 +3359,7 b' def guard(ui, repo, *args, **opts):'
if opts.get(r'list'):
    if args or opts.get(r'none'):
        raise error.Abort(
            _(b'cannot mix -l/--list with options or arguments')
        )
    for i in pycompat.xrange(len(q.series)):
        status(i)
@@ -3671,13 +3671,12 b' def save(ui, repo, **opts):'
if os.path.exists(newpath):
    if not os.path.isdir(newpath):
        raise error.Abort(
            _(b'destination %s exists and is not a directory')
            % newpath
        )
    if not opts.get(b'force'):
        raise error.Abort(
            _(b'destination %s exists, use -f to force') % newpath
        )
else:
    newpath = savename(path)
@@ -4157,7 +4156,7 b' def reposetup(ui, repo):'
for patch in mqtags:
    if patch[1] in tags:
        self.ui.warn(
            _(b'tag %s overrides mq patch of the same name\n')
            % patch[1]
        )
    else:
@@ -4191,13 +4190,13 b' def mqinit(orig, ui, *args, **kwargs):'
    repopath = args[0]
    if not hg.islocal(repopath):
        raise error.Abort(
            _(b'only a local queue repository may be initialized')
        )
else:
    repopath = cmdutil.findrepo(encoding.getcwd())
    if not repopath:
        raise error.Abort(
            _(b'there is no Mercurial repository here (.hg not found)')
        )
repo = hg.repository(ui, repopath)
return qinit(ui, repo, True)
@@ -249,7 +249,7 b' def _narrow('
if not force:
    raise error.Abort(
        _(b'local changes found'),
        hint=_(b'use --force-delete-local-changes to ignore'),
    )

with ui.uninterruptible():
@@ -377,7 +377,7 b' class notifier(object):'
    continue
if len(subs) == 0:
    self.ui.debug(
        b'notify: no subscribers to selected repo and revset\n'
    )
    return

@@ -162,7 +162,7 b' def _addpullheader(seq, ctx):'
# destination before patchbombing anything.
publicurl = repo.ui.config(b'patchbomb', b'publicurl')
if publicurl:
    return b'Available At %s\n# hg pull %s -r %s' % (
        publicurl,
        publicurl,
        ctx,
@@ -343,9 +343,7 b' def _getpatches(repo, revs, **opts):'
prev = repo[b'.'].rev()
for r in revs:
    if r == prev and (repo[None].files() or repo[None].deleted()):
        ui.warn(_(b'warning: working directory has uncommitted changes\n'))
    output = stringio()
    cmdutil.exportfile(
        repo, [r], output, opts=patch.difffeatureopts(ui, opts, git=True)
@@ -391,7 +389,7 b' def _getdescription(repo, defaultbody, s'
    body = open(opts.get(r'desc')).read()
else:
    ui.write(
        _(b'\nWrite the introductory message for the patch series.\n\n')
    )
    body = ui.edit(
        defaultbody, sender, repopath=repo.path, action=b'patchbombbody'
@@ -911,7 +909,7 b' def email(ui, repo, *revs, **opts):'
    ui.write(ds, label=b'patchbomb.diffstats')
ui.write(b'\n')
if ui.promptchoice(
    _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')
):
    raise error.Abort(_(b'patchbomb canceled'))

@@ -830,7 +830,7 b' def _confirmbeforesend(repo, revs, oldma'
    )

if ui.promptchoice(
    _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url
):
    return False

@@ -469,7 +469,7 b' class rebaseruntime(object):'
    branches.add(repo[rev].branch())
    if len(branches) > 1:
        raise error.Abort(
            _(b'cannot collapse multiple named branches')
        )

# Calculate self.obsoletenotrebased
@@ -589,14 +589,11 b' class rebaseruntime(object):'
elif rev in self.obsoletenotrebased:
    succ = self.obsoletenotrebased[rev]
    if succ is None:
        msg = _(b'note: not rebasing %s, it has no successor\n') % desc
    else:
        succdesc = _ctxdesc(repo[succ])
        msg = _(
            b'note: not rebasing %s, already in destination as %s\n'
        ) % (desc, succdesc)
    repo.ui.status(msg)
    # Make clearrebased aware state[rev] is not a true successor
@@ -1111,7 +1108,7 b' def _dryrunrebase(ui, repo, action, opts'
    ui.status(_(b'starting in-memory rebase\n'))
else:
    ui.status(
        _(b'starting dry-run rebase; repository will not be changed\n')
    )
with repo.wlock(), repo.lock():
    needsabort = True
@@ -1136,9 +1133,7 b' def _dryrunrebase(ui, repo, action, opts'
else:
    if confirm:
        ui.status(_(b'rebase completed successfully\n'))
        if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
            # finish unfinished rebase
            rbsrt._finishrebase()
        else:
@@ -1415,8 +1410,7 b' def _definedestmap('
    ui.note(_(b'skipping %s - empty destination\n') % repo[r])
else:
    raise error.Abort(
        _(b'rebase destination for %s is not unique') % repo[r]
    )

if dest is not None:
@@ -1638,7 +1632,7 b' def _checkobsrebase(repo, ui, rebaseobsr'

if divergencebasecandidates and not divergenceok:
    divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
    msg = _(b"this rebase will cause divergences from: %s")
    h = _(
        b"to force the rebase please set "
        b"experimental.evolution.allowdivergence=True"
@@ -300,7 +300,7 b' def checkadmonitions(ui, repo, directive'
    continue
else:
    ui.write(
        _(b"Invalid admonition '%s' present in changeset %s\n")
        % (admonition.group(1), ctx.hex()[:12])
    )
    sim = lambda x: difflib.SequenceMatcher(
@@ -376,7 +376,7 b' def parsenotesfromrevisions(repo, direct'
# TODO consider using title as paragraph for more concise notes.
if not paragraphs:
    repo.ui.warn(
        _(b"error parsing releasenotes for revision: '%s'\n")
        % node.hex(ctx.node())
    )
if title:
@@ -422,7 +422,7 b' def parsereleasenotesfile(sections, text'
    continue
elif block[b'type'] != b'paragraph':
    raise error.Abort(
        _(b'unexpected block type in release notes: %s')
        % block[b'type']
    )
if title:
@@ -273,7 +273,7 b' def uisetup(ui):'
        b'',
        b'shallow',
        None,
        _(b"create a shallow clone which uses remote file history"),
    )
)

@@ -796,8 +796,7 b' def walkfilerevs(orig, repo, match, foll'
for filename in match.files():
    if filename not in pctx:
        raise error.Abort(
            _(b'cannot follow file not in parent revision: "%s"') % filename
        )
    fctx = pctx[filename]

@@ -137,7 +137,7 b' class unioncontentstore(basestore.baseun'

def add(self, name, node, data):
    raise RuntimeError(
        b"cannot add content only to remotefilelog contentstore"
    )

def getmissing(self, keys):
@@ -209,7 +209,7 b' class remotefilelogcontentstore(basestor'

def add(self, name, node, data):
    raise RuntimeError(
        b"cannot add content only to remotefilelog contentstore"
    )

def _sanitizemetacache(self):
@@ -414,7 +414,7 b' class fileserverclient(object):'
):
    if not isinstance(remote, _sshv1peer):
        raise error.Abort(
            b'remotefilelog requires ssh servers'
        )
    step = self.ui.configint(
        b'remotefilelog', b'getfilesstep'
@@ -99,7 +99,7 b' class unionmetadatastore(basestore.baseu'

def add(self, name, node, data):
    raise RuntimeError(
        b"cannot add content only to remotefilelog contentstore"
    )

def getmissing(self, keys):
@@ -136,7 +136,7 b' class remotefilelogmetadatastore(basesto'

def add(self, name, node, parents, linknode):
    raise RuntimeError(
        b"cannot add metadata only to remotefilelog metadatastore"
    )


@@ -194,7 +194,7 b' def onetimesetup(ui):'
    # since it would require fetching every version of every
    # file in order to create the revlogs.
    raise error.Abort(
        _(b"Cannot clone from a shallow repo to a full repo.")
    )
else:
    for x in orig(repo, matcher):
@@ -55,7 +55,7 b' def getcachepath(ui, allowempty=False):'
        return None
    else:
        raise error.Abort(
            _(b"could not find config option remotefilelog.cachepath")
        )
return util.expandpath(cachepath)

@@ -427,7 +427,7 b' def readexactly(stream, n):'
s = stream.read(n)
if len(s) < n:
    raise error.Abort(
        _(b"stream ended unexpectedly (got %d bytes, expected %d)")
        % (len(s), n)
    )
return s
@@ -70,7 +70,7 b" testedwith = b'ships-with-hg-core'"
        b'',
        b'relative',
        None,
        _(b'point to source using a relative path (EXPERIMENTAL)'),
    ),
],
_(b'[-U] [-B] SOURCE [DEST]'),
@@ -108,7 +108,7 b' def split(ui, repo, *revs, **opts):'
    )
    if not alloworphaned and len(torebase) != len(descendants):
        raise error.Abort(
            _(b'split would leave orphaned changesets behind')
        )
else:
    if not alloworphaned and descendants:
@@ -164,7 +164,7 b' def dosplit(ui, repo, tr, ctx, opts):'
        firstline = c.description().split(b'\n', 1)[0]
        header += _(b'HG: - %s: %s\n') % (short(c.node()), firstline)
    header += _(
        b'HG: Write commit message for the next split changeset.\n'
    )
else:
    header = _(
@@ -358,7 +358,7 b' class sqlitefilestore(object):'

if i != rev:
    raise SQLiteStoreError(
        _(b'sqlite database has inconsistent revision numbers')
    )

if p1rev == nullrev:
@@ -772,7 +772,7 b' class sqlitefilestore(object):'
# SQLite, since columns can be resized at will.
if len(tombstone) > len(self.rawdata(censornode)):
    raise error.Abort(
        _(b'censor tombstone must be no longer than censored data')
    )

# We need to replace the censored revision's data with the tombstone.
@@ -1161,7 +1161,7 b' def newreporequirements(orig, ui, create'
# This restriction can be lifted once we have more confidence.
if b'sharedrepo' in createopts:
    raise error.Abort(
        _(b'shared repositories not supported with SQLite store')
    )

# This filtering is out of an abundance of caution: we want to ensure
@@ -1176,7 +1176,7 b' def newreporequirements(orig, ui, create'
unsupported = set(createopts) - known
if unsupported:
    raise error.Abort(
        _(b'SQLite store does not support repo creation option: %s')
        % b', '.join(sorted(unsupported))
    )

@@ -124,24 +124,19 b' def strip('
        ),
    ),
    (b'', b'no-backup', None, _(b'do not save backup bundle')),
    (b'', b'nobackup', None, _(b'do not save backup bundle (DEPRECATED)'),),
    (b'n', b'', None, _(b'ignored (DEPRECATED)')),
    (
        b'k',
        b'keep',
        None,
        _(b"do not modify working directory during strip"),
    ),
    (
        b'B',
        b'bookmark',
        [],
        _(b"remove revs only reachable from given bookmark"),
        _(b'BOOKMARK'),
    ),
    (
@@ -438,7 +438,7 b' class transplanter(object):'
p1 = repo.dirstate.p1()
if p1 != parent:
    raise error.Abort(
        _(b'working directory not at transplant parent %s')
        % nodemod.hex(parent)
    )
if merge:
@@ -661,7 +661,7 b' def browserevs(ui, repo, nodes, opts):'
    b'c',
    b'continue',
    None,
    _(b'continue last transplant session after fixing conflicts'),
),
(
    b'',
@@ -788,7 +788,7 b' def _dotransplant(ui, repo, *revs, **opt'
        raise error.Abort(_(b'--all requires a branch revision'))
    if revs:
        raise error.Abort(
            _(b'--all is incompatible with a revision list')
        )

opts = pycompat.byteskwargs(opts)
@@ -165,7 +165,7 b' def uncommit(ui, repo, *pats, **opts):'
if not allowdirtywcopy and (not pats or isdirtypath):
    cmdutil.bailifchanged(
        repo,
        hint=_(b'requires --allow-dirty-working-copy to uncommit'),
    )
old = repo[b'.']
rewriteutil.precheck(repo, [old.rev()], b'uncommit')
@@ -190,7 +190,7 b' def uncommit(ui, repo, *pats, **opts):'
for f in sorted(badfiles):
    if f in s.clean:
        hint = _(
            b"file was not changed in working directory parent"
        )
    elif repo.wvfs.exists(f):
        hint = _(b"file was untracked in working directory parent")
@@ -128,7 +128,7 b' def basewrapper(func, argtype, enc, dec,'
    return enc(func(*dec(args), **dec(kwds)))
except UnicodeError:
    raise error.Abort(
        _(b"[win32mbcs] filename conversion failed with %s encoding\n")
        % _encoding
    )

@@ -302,7 +302,7 b' class bmstore(object):'
            )
        return delbms
    raise error.Abort(
        _(b"bookmark '%s' already exists (use -f to force)") % mark
    )
if (
    mark in self._repo.branchmap()
@@ -893,7 +893,7 b' def checkformat(repo, mark):'
mark = mark.strip()
if not mark:
    raise error.Abort(
        _(b"bookmark names cannot consist entirely of whitespace")
    )
scmutil.checknewlabel(repo, mark, b'bookmark')
return mark
@@ -1233,7 +1233,7 b' class interrupthandler(unpackermixin):'
def __call__(self):

    self.ui.debug(
        b'bundle2-input-stream-interrupt: opening out of band context\n'
    )
    indebug(self.ui, b'bundle2 stream interruption, looking for a part.')
    headerblock = self._readpartheader()
@@ -1252,7 +1252,7 b' class interrupthandler(unpackermixin):'
if not hardabort:
    part.consume()
self.ui.debug(
    b'bundle2-input-stream-interrupt: closing out of band context\n'
)


@@ -1320,7 +1320,7 b' def decodepayloadchunks(ui, fh):'
s = read(headersize)
if len(s) < headersize:
    raise error.Abort(
        _(b'stream ended unexpectedly (got %d bytes, expected %d)')
        % (len(s), chunksize)
    )

@@ -1889,7 +1889,7 b' def writebundle('
assert compression is None
if cg.version != b'01':
    raise error.Abort(
        _(b'old bundle types only supports v1 changegroups')
    )
header, comp = bundletypes[bundletype]
if comp not in util.compengines.supportedbundletypes:
@@ -2136,7 +2136,7 b' def handlecheckheads(op, inpart):'
op.gettransaction()
if sorted(heads) != sorted(op.repo.heads()):
    raise error.PushRaced(
        b'remote repository changed while pushing - please try again'
    )


@@ -269,7 +269,7 b' class bundlerepository(object):'
if part.type == b'changegroup':
    if cgpart:
        raise NotImplementedError(
            b"can't process multiple changegroups"
        )
    cgpart = part

@@ -1551,7 +1551,7 b' def getbundler('

if version == b'01' and not matcher.always():
    raise error.ProgrammingError(
        b'version 01 changegroups do not support sparse file matchers'
    )

if ellipses and version in (b'01', b'02'):
@@ -257,13 +257,9 b' def resolvecommitoptions(ui, opts):'
the ``date`` option is set.
"""
if opts.get(b'date') and opts.get(b'currentdate'):
    raise error.Abort(_(b'--date and --currentdate are mutually exclusive'))
if opts.get(b'user') and opts.get(b'currentuser'):
    raise error.Abort(_(b'--user and --currentuser are mutually exclusive'))

datemaydiffer = False  # date-only change should be ignored?

@@ -1010,7 +1006,7 b' def logmessage(ui, opts):' | |||||
1010 |
|
1006 | |||
1011 | if message and logfile: |
|
1007 | if message and logfile: | |
1012 | raise error.Abort( |
|
1008 | raise error.Abort( | |
1013 |
_(b'options --message and --logfile are mutually |
|
1009 | _(b'options --message and --logfile are mutually exclusive') | |
1014 | ) |
|
1010 | ) | |
1015 | if not message and logfile: |
|
1011 | if not message and logfile: | |
1016 | try: |
|
1012 | try: | |
@@ -1180,7 +1176,7 b' def _buildfntemplate(pat, total=None, se' | |||||
1180 | newname.append(stringutil.escapestr(pat[i:n])) |
|
1176 | newname.append(stringutil.escapestr(pat[i:n])) | |
1181 | if n + 2 > end: |
|
1177 | if n + 2 > end: | |
1182 | raise error.Abort( |
|
1178 | raise error.Abort( | |
1183 |
_(b"incomplete format spec in output |
|
1179 | _(b"incomplete format spec in output filename") | |
1184 | ) |
|
1180 | ) | |
1185 | c = pat[n + 1 : n + 2] |
|
1181 | c = pat[n + 1 : n + 2] | |
1186 | i = n + 2 |
|
1182 | i = n + 2 | |
@@ -1188,7 +1184,7 b' def _buildfntemplate(pat, total=None, se' | |||||
1188 | newname.append(expander[c]) |
|
1184 | newname.append(expander[c]) | |
1189 | except KeyError: |
|
1185 | except KeyError: | |
1190 | raise error.Abort( |
|
1186 | raise error.Abort( | |
1191 |
_(b"invalid format spec '%%%s' in output |
|
1187 | _(b"invalid format spec '%%%s' in output filename") % c | |
1192 | ) |
|
1188 | ) | |
1193 | return b''.join(newname) |
|
1189 | return b''.join(newname) | |
1194 |
|
1190 | |||
@@ -2264,7 +2260,7 b' def walkchangerevs(repo, match, opts, pr' | |||||
2264 |
|
2260 | |||
2265 | if follow: |
|
2261 | if follow: | |
2266 | raise error.Abort( |
|
2262 | raise error.Abort( | |
2267 |
_(b'can only follow copies/renames for explicit |
|
2263 | _(b'can only follow copies/renames for explicit filenames') | |
2268 | ) |
|
2264 | ) | |
2269 |
|
2265 | |||
2270 | # The slow path checks files modified in every changeset. |
|
2266 | # The slow path checks files modified in every changeset. |
@@ -830,7 +830,7 b' def _dobackout(ui, repo, node=None, rev=' | |||||
830 | hg._showstats(repo, stats) |
|
830 | hg._showstats(repo, stats) | |
831 | if stats.unresolvedcount: |
|
831 | if stats.unresolvedcount: | |
832 | repo.ui.status( |
|
832 | repo.ui.status( | |
833 |
_(b"use 'hg resolve' to retry unresolved |
|
833 | _(b"use 'hg resolve' to retry unresolved file merges\n") | |
834 | ) |
|
834 | ) | |
835 | return 1 |
|
835 | return 1 | |
836 | else: |
|
836 | else: | |
@@ -839,7 +839,7 b' def _dobackout(ui, repo, node=None, rev=' | |||||
839 | cmdutil.revert(ui, repo, rctx, repo.dirstate.parents()) |
|
839 | cmdutil.revert(ui, repo, rctx, repo.dirstate.parents()) | |
840 |
|
840 | |||
841 | if opts.get(b'no_commit'): |
|
841 | if opts.get(b'no_commit'): | |
842 |
msg = _(b"changeset %s backed out, |
|
842 | msg = _(b"changeset %s backed out, don't forget to commit.\n") | |
843 | ui.status(msg % short(node)) |
|
843 | ui.status(msg % short(node)) | |
844 | return 0 |
|
844 | return 0 | |
845 |
|
845 | |||
@@ -1353,7 +1353,7 b' def branch(ui, repo, label=None, **opts)' | |||||
1353 | if not opts.get(b'force') and label in repo.branchmap(): |
|
1353 | if not opts.get(b'force') and label in repo.branchmap(): | |
1354 | if label not in [p.branch() for p in repo[None].parents()]: |
|
1354 | if label not in [p.branch() for p in repo[None].parents()]: | |
1355 | raise error.Abort( |
|
1355 | raise error.Abort( | |
1356 |
_(b'a branch of the same name already |
|
1356 | _(b'a branch of the same name already exists'), | |
1357 | # i18n: "it" refers to an existing branch |
|
1357 | # i18n: "it" refers to an existing branch | |
1358 | hint=_(b"use 'hg update' to switch to it"), |
|
1358 | hint=_(b"use 'hg update' to switch to it"), | |
1359 | ) |
|
1359 | ) | |
@@ -1562,9 +1562,7 b' def bundle(ui, repo, fname, dest=None, *' | |||||
1562 | except error.UnsupportedBundleSpecification as e: |
|
1562 | except error.UnsupportedBundleSpecification as e: | |
1563 | raise error.Abort( |
|
1563 | raise error.Abort( | |
1564 | pycompat.bytestr(e), |
|
1564 | pycompat.bytestr(e), | |
1565 | hint=_( |
|
1565 | hint=_(b"see 'hg help bundlespec' for supported values for --type"), | |
1566 | b"see 'hg help bundlespec' for supported " b"values for --type" |
|
|||
1567 | ), |
|
|||
1568 | ) |
|
1566 | ) | |
1569 | cgversion = bundlespec.contentopts[b"cg.version"] |
|
1567 | cgversion = bundlespec.contentopts[b"cg.version"] | |
1570 |
|
1568 | |||
@@ -1578,7 +1576,7 b' def bundle(ui, repo, fname, dest=None, *' | |||||
1578 | if opts.get(b'all'): |
|
1576 | if opts.get(b'all'): | |
1579 | if dest: |
|
1577 | if dest: | |
1580 | raise error.Abort( |
|
1578 | raise error.Abort( | |
1581 |
_(b"--all is incompatible with specifying |
|
1579 | _(b"--all is incompatible with specifying a destination") | |
1582 | ) |
|
1580 | ) | |
1583 | if opts.get(b'base'): |
|
1581 | if opts.get(b'base'): | |
1584 | ui.warn(_(b"ignoring --base because --all was specified\n")) |
|
1582 | ui.warn(_(b"ignoring --base because --all was specified\n")) | |
@@ -1593,7 +1591,7 b' def bundle(ui, repo, fname, dest=None, *' | |||||
1593 | if base: |
|
1591 | if base: | |
1594 | if dest: |
|
1592 | if dest: | |
1595 | raise error.Abort( |
|
1593 | raise error.Abort( | |
1596 |
_(b"--base is incompatible with specifying |
|
1594 | _(b"--base is incompatible with specifying a destination") | |
1597 | ) |
|
1595 | ) | |
1598 | common = [repo[rev].node() for rev in base] |
|
1596 | common = [repo[rev].node() for rev in base] | |
1599 | heads = [repo[r].node() for r in revs] if revs else None |
|
1597 | heads = [repo[r].node() for r in revs] if revs else None | |
@@ -2048,7 +2046,7 b' def _docommit(ui, repo, *pats, **opts):' | |||||
2048 |
|
2046 | |||
2049 | if repo[b'.'].closesbranch(): |
|
2047 | if repo[b'.'].closesbranch(): | |
2050 | raise error.Abort( |
|
2048 | raise error.Abort( | |
2051 |
_(b'current revision is already a branch closing |
|
2049 | _(b'current revision is already a branch closing head') | |
2052 | ) |
|
2050 | ) | |
2053 | elif not bheads: |
|
2051 | elif not bheads: | |
2054 | raise error.Abort(_(b'branch "%s" has no heads to close') % branch) |
|
2052 | raise error.Abort(_(b'branch "%s" has no heads to close') % branch) | |
@@ -2298,7 +2296,7 b' def continuecmd(ui, repo, **opts):' | |||||
2298 | if not contstate.continuefunc: |
|
2296 | if not contstate.continuefunc: | |
2299 | raise error.Abort( |
|
2297 | raise error.Abort( | |
2300 | ( |
|
2298 | ( | |
2301 | _(b"%s in progress but does not support |
2299 | _(b"%s in progress but does not support 'hg continue'") | |
2302 | % (contstate._opname) |
|
2300 | % (contstate._opname) | |
2303 | ), |
|
2301 | ), | |
2304 | hint=contstate.continuemsg(), |
|
2302 | hint=contstate.continuemsg(), | |
@@ -2987,19 +2985,19 b' def _dograft(ui, repo, *revs, **opts):' | |||||
2987 | if opts.get(b'no_commit'): |
|
2985 | if opts.get(b'no_commit'): | |
2988 | if opts.get(b'edit'): |
|
2986 | if opts.get(b'edit'): | |
2989 | raise error.Abort( |
|
2987 | raise error.Abort( | |
2990 | _(b"cannot specify --no-commit and |
2988 | _(b"cannot specify --no-commit and --edit together") | |
2991 | ) |
|
2989 | ) | |
2992 | if opts.get(b'currentuser'): |
|
2990 | if opts.get(b'currentuser'): | |
2993 | raise error.Abort( |
|
2991 | raise error.Abort( | |
2994 | _(b"cannot specify --no-commit and |
2992 | _(b"cannot specify --no-commit and --currentuser together") | |
2995 | ) |
|
2993 | ) | |
2996 | if opts.get(b'currentdate'): |
|
2994 | if opts.get(b'currentdate'): | |
2997 | raise error.Abort( |
|
2995 | raise error.Abort( | |
2998 | _(b"cannot specify --no-commit and |
2996 | _(b"cannot specify --no-commit and --currentdate together") | |
2999 | ) |
|
2997 | ) | |
3000 | if opts.get(b'log'): |
|
2998 | if opts.get(b'log'): | |
3001 | raise error.Abort( |
|
2999 | raise error.Abort( | |
3002 | _(b"cannot specify --no-commit and |
3000 | _(b"cannot specify --no-commit and --log together") | |
3003 | ) |
|
3001 | ) | |
3004 |
|
3002 | |||
3005 | graftstate = statemod.cmdstate(repo, b'graftstate') |
|
3003 | graftstate = statemod.cmdstate(repo, b'graftstate') | |
@@ -3007,7 +3005,7 b' def _dograft(ui, repo, *revs, **opts):' | |||||
3007 | if opts.get(b'stop'): |
|
3005 | if opts.get(b'stop'): | |
3008 | if opts.get(b'continue'): |
|
3006 | if opts.get(b'continue'): | |
3009 | raise error.Abort( |
|
3007 | raise error.Abort( | |
3010 | _(b"cannot use '--continue' and |
3008 | _(b"cannot use '--continue' and '--stop' together") | |
3011 | ) |
|
3009 | ) | |
3012 | if opts.get(b'abort'): |
|
3010 | if opts.get(b'abort'): | |
3013 | raise error.Abort(_(b"cannot use '--abort' and '--stop' together")) |
|
3011 | raise error.Abort(_(b"cannot use '--abort' and '--stop' together")) | |
@@ -3028,7 +3026,7 b' def _dograft(ui, repo, *revs, **opts):' | |||||
3028 | elif opts.get(b'abort'): |
|
3026 | elif opts.get(b'abort'): | |
3029 | if opts.get(b'continue'): |
|
3027 | if opts.get(b'continue'): | |
3030 | raise error.Abort( |
|
3028 | raise error.Abort( | |
3031 | _(b"cannot use '--continue' and |
3029 | _(b"cannot use '--continue' and '--abort' together") | |
3032 | ) |
|
3030 | ) | |
3033 | if any( |
|
3031 | if any( | |
3034 | ( |
|
3032 | ( | |
@@ -3866,7 +3864,7 b' def identify(' | |||||
3866 | opts = pycompat.byteskwargs(opts) |
|
3864 | opts = pycompat.byteskwargs(opts) | |
3867 | if not repo and not source: |
|
3865 | if not repo and not source: | |
3868 | raise error.Abort( |
|
3866 | raise error.Abort( | |
3869 | _(b"there is no Mercurial repository here |
3867 | _(b"there is no Mercurial repository here (.hg not found)") | |
3870 | ) |
|
3868 | ) | |
3871 |
|
3869 | |||
3872 | default = not (num or id or branch or tags or bookmarks) |
|
3870 | default = not (num or id or branch or tags or bookmarks) | |
@@ -4904,7 +4902,7 b' statemod.addunfinished(' | |||||
4904 | cmdmsg=_(b'outstanding uncommitted merge'), |
|
4902 | cmdmsg=_(b'outstanding uncommitted merge'), | |
4905 | abortfunc=hg.abortmerge, |
|
4903 | abortfunc=hg.abortmerge, | |
4906 | statushint=_( |
|
4904 | statushint=_( | |
4907 | b'To continue: hg commit\n |
4905 | b'To continue: hg commit\nTo abort: hg merge --abort' | |
4908 | ), |
|
4906 | ), | |
4909 | cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"), |
|
4907 | cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"), | |
4910 | ) |
|
4908 | ) | |
@@ -5305,7 +5303,7 b' def postincoming(ui, repo, modheads, opt' | |||||
5305 | ) |
|
5303 | ) | |
5306 | elif currentbranchheads > 1: |
|
5304 | elif currentbranchheads > 1: | |
5307 | ui.status( |
|
5305 | ui.status( | |
5308 | _(b"(run 'hg heads .' to see heads, 'hg merge' to |
5306 | _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n") | |
5309 | ) |
|
5307 | ) | |
5310 | else: |
|
5308 | else: | |
5311 | ui.status(_(b"(run 'hg heads' to see heads)\n")) |
|
5309 | ui.status(_(b"(run 'hg heads' to see heads)\n")) | |
@@ -5615,7 +5613,7 b' def push(ui, repo, dest=None, **opts):' | |||||
5615 | revs = [repo[rev].node() for rev in revs] |
|
5613 | revs = [repo[rev].node() for rev in revs] | |
5616 | if not revs: |
|
5614 | if not revs: | |
5617 | raise error.Abort( |
|
5615 | raise error.Abort( | |
5618 | _(b'default push revset for path evaluates to an |
5616 | _(b'default push revset for path evaluates to an empty set') | |
5619 | ) |
|
5617 | ) | |
5620 |
|
5618 | |||
5621 | repo._subtoppath = dest |
|
5619 | repo._subtoppath = dest | |
@@ -5882,7 +5880,7 b' def resolve(ui, repo, *pats, **opts):' | |||||
5882 | if confirm: |
|
5880 | if confirm: | |
5883 | if all: |
|
5881 | if all: | |
5884 | if ui.promptchoice( |
|
5882 | if ui.promptchoice( | |
5885 | _(b're-merge all unresolved files (yn)? |
5883 | _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No') | |
5886 | ): |
|
5884 | ): | |
5887 | raise error.Abort(_(b'user quit')) |
|
5885 | raise error.Abort(_(b'user quit')) | |
5888 | if mark and not pats: |
|
5886 | if mark and not pats: | |
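Several of the joined strings above and below (`re-merge all unresolved files (yn)?$$ &Yes $$ &No`, `push and publish %i changesets (yn)?$$ &Yes $$ &No`, and so on) are `ui.promptchoice()` prompts: the text before the first `$$` is the question, each later `$$` segment is one choice, and `&` marks the response key. A simplified stand-in parser, written only to illustrate the convention and not taken from Mercurial's `ui` implementation:

    def split_prompt(msg):
        # question comes before the first '$$'; each later segment is a
        # choice whose single-key shortcut follows the '&' marker
        parts = msg.split(b'$$')
        question = parts[0].strip()
        choices = [p.strip() for p in parts[1:]]
        keys = [c[c.index(b'&') + 1:c.index(b'&') + 2].lower() for c in choices]
        return question, choices, keys

    q, choices, keys = split_prompt(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
    assert keys == [b'y', b'n']

Joining the literal therefore has to keep the `$$` markers adjacent to the question text, which is why these prompts are folded into a single string rather than re-wrapped.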
@@ -6474,7 +6472,7 b' def serve(ui, repo, **opts):' | |||||
6474 | if opts[b"stdio"]: |
|
6472 | if opts[b"stdio"]: | |
6475 | if repo is None: |
|
6473 | if repo is None: | |
6476 | raise error.RepoError( |
|
6474 | raise error.RepoError( | |
6477 | _(b"there is no Mercurial repository here |
6475 | _(b"there is no Mercurial repository here (.hg not found)") | |
6478 | ) |
|
6476 | ) | |
6479 | s = wireprotoserver.sshserver(ui, repo) |
|
6477 | s = wireprotoserver.sshserver(ui, repo) | |
6480 | s.serve_forever() |
|
6478 | s.serve_forever() | |
@@ -7234,7 +7232,7 b' def tag(ui, repo, name1, *names, **opts)' | |||||
7234 | scmutil.checknewlabel(repo, n, b'tag') |
|
7232 | scmutil.checknewlabel(repo, n, b'tag') | |
7235 | if not n: |
|
7233 | if not n: | |
7236 | raise error.Abort( |
|
7234 | raise error.Abort( | |
7237 | _(b'tag names cannot consist entirely of |
7235 | _(b'tag names cannot consist entirely of whitespace') | |
7238 | ) |
|
7236 | ) | |
7239 | if opts.get(b'rev') and opts.get(b'remove'): |
|
7237 | if opts.get(b'rev') and opts.get(b'remove'): | |
7240 | raise error.Abort(_(b"--rev and --remove are incompatible")) |
|
7238 | raise error.Abort(_(b"--rev and --remove are incompatible")) | |
@@ -7269,7 +7267,7 b' def tag(ui, repo, name1, *names, **opts)' | |||||
7269 | for n in names: |
|
7267 | for n in names: | |
7270 | if n in repo.tags(): |
|
7268 | if n in repo.tags(): | |
7271 | raise error.Abort( |
|
7269 | raise error.Abort( | |
7272 | _(b"tag '%s' already exists |
7270 | _(b"tag '%s' already exists (use -f to force)") % n | |
7273 | ) |
|
7271 | ) | |
7274 | if not opts.get(b'local'): |
|
7272 | if not opts.get(b'local'): | |
7275 | p1, p2 = repo.dirstate.parents() |
|
7273 | p1, p2 = repo.dirstate.parents() |
@@ -1619,7 +1619,7 b' class workingctx(committablectx):' | |||||
1619 | return |
|
1619 | return | |
1620 | if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): |
|
1620 | if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): | |
1621 | self._repo.ui.warn( |
|
1621 | self._repo.ui.warn( | |
1622 | _(b"copy failed: %s is not a file or a |
1622 | _(b"copy failed: %s is not a file or a symbolic link\n") | |
1623 | % self._repo.dirstate.pathto(dest) |
|
1623 | % self._repo.dirstate.pathto(dest) | |
1624 | ) |
|
1624 | ) | |
1625 | else: |
|
1625 | else: | |
@@ -1678,7 +1678,7 b' class workingctx(committablectx):' | |||||
1678 | or stringutil.binary(d) |
|
1678 | or stringutil.binary(d) | |
1679 | ): |
|
1679 | ): | |
1680 | self._repo.ui.debug( |
|
1680 | self._repo.ui.debug( | |
1681 | b'ignoring suspect symlink placeholder |
1681 | b'ignoring suspect symlink placeholder "%s"\n' % f | |
1682 | ) |
|
1682 | ) | |
1683 | continue |
|
1683 | continue | |
1684 | sane.append(f) |
|
1684 | sane.append(f) | |
@@ -1750,7 +1750,7 b' class workingctx(committablectx):' | |||||
1750 | # already changed simultaneously after last |
|
1750 | # already changed simultaneously after last | |
1751 | # caching (see also issue5584 for detail) |
|
1751 | # caching (see also issue5584 for detail) | |
1752 | self._repo.ui.debug( |
|
1752 | self._repo.ui.debug( | |
1753 | b'skip updating dirstate: |
1753 | b'skip updating dirstate: identity mismatch\n' | |
1754 | ) |
|
1754 | ) | |
1755 | except error.LockError: |
|
1755 | except error.LockError: | |
1756 | pass |
|
1756 | pass |
@@ -677,7 +677,7 b' def _fullcopytracing(repo, c1, c2, base)' | |||||
677 | if df not in copy: |
|
677 | if df not in copy: | |
678 | movewithdir[f] = df |
|
678 | movewithdir[f] = df | |
679 | repo.ui.debug( |
|
679 | repo.ui.debug( | |
680 |
680 | b" pending file src: '%s' -> dst: '%s'\n" | |
681 | % (f, df) |
|
681 | % (f, df) | |
682 | ) |
|
682 | ) | |
683 | break |
|
683 | break |
@@ -275,7 +275,7 b' def parsedag(desc):' | |||||
275 | i += 1 |
|
275 | i += 1 | |
276 | c = nextch() |
|
276 | c = nextch() | |
277 | raise error.Abort( |
|
277 | raise error.Abort( | |
278 | _(b'invalid character in dag description: |
278 | _(b'invalid character in dag description: %s...') % s | |
279 | ) |
|
279 | ) | |
280 |
|
280 | |||
281 |
|
281 | |||
@@ -381,7 +381,7 b' def dagtextlines(' | |||||
381 | yield b'\n' |
|
381 | yield b'\n' | |
382 | else: |
|
382 | else: | |
383 | raise error.Abort( |
|
383 | raise error.Abort( | |
384 | _(b"invalid event type in dag: |
384 | _(b"invalid event type in dag: ('%s', '%s')") | |
385 | % ( |
|
385 | % ( | |
386 | stringutil.escapestr(kind), |
|
386 | stringutil.escapestr(kind), | |
387 | stringutil.escapestr(data), |
|
387 | stringutil.escapestr(data), |
@@ -109,7 +109,7 b' def debugancestor(ui, repo, *args):' | |||||
109 | elif len(args) == 2: |
|
109 | elif len(args) == 2: | |
110 | if not repo: |
|
110 | if not repo: | |
111 | raise error.Abort( |
|
111 | raise error.Abort( | |
112 | _(b'there is no Mercurial repository here |
112 | _(b'there is no Mercurial repository here (.hg not found)') | |
113 | ) |
|
113 | ) | |
114 | rev1, rev2 = args |
|
114 | rev1, rev2 = args | |
115 | r = repo.changelog |
|
115 | r = repo.changelog | |
@@ -1464,7 +1464,7 b' def debuginstall(ui, **opts):' | |||||
1464 | fm.condwrite( |
|
1464 | fm.condwrite( | |
1465 | err, |
|
1465 | err, | |
1466 | b'encodingerror', |
|
1466 | b'encodingerror', | |
1467 | _(b" %s\n |
1467 | _(b" %s\n (check that your locale is properly set)\n"), | |
1468 | err, |
|
1468 | err, | |
1469 | ) |
|
1469 | ) | |
1470 |
|
1470 | |||
@@ -1577,7 +1577,7 b' def debuginstall(ui, **opts):' | |||||
1577 | ) |
|
1577 | ) | |
1578 | fm.write( |
|
1578 | fm.write( | |
1579 | b'compenginesavail', |
|
1579 | b'compenginesavail', | |
1580 | _(b'checking available compression engines |
1580 | _(b'checking available compression engines (%s)\n'), | |
1581 | fm.formatlist( |
|
1581 | fm.formatlist( | |
1582 | sorted(e.name() for e in compengines if e.available()), |
|
1582 | sorted(e.name() for e in compengines if e.available()), | |
1583 | name=b'compengine', |
|
1583 | name=b'compengine', | |
@@ -1701,7 +1701,7 b' def debuginstall(ui, **opts):' | |||||
1701 | fm.condwrite( |
|
1701 | fm.condwrite( | |
1702 | problems, |
|
1702 | problems, | |
1703 | b'problems', |
|
1703 | b'problems', | |
1704 | _(b"%d problems detected, |
1704 | _(b"%d problems detected, please check your install!\n"), | |
1705 | problems, |
|
1705 | problems, | |
1706 | ) |
|
1706 | ) | |
1707 | fm.end() |
|
1707 | fm.end() | |
@@ -2071,7 +2071,7 b' def debugnamecomplete(ui, repo, *args):' | |||||
2071 | b'', |
|
2071 | b'', | |
2072 | b'exclusive', |
|
2072 | b'exclusive', | |
2073 | False, |
|
2073 | False, | |
2074 | _(b'restrict display to markers only |
2074 | _(b'restrict display to markers only relevant to REV'), | |
2075 | ), |
|
2075 | ), | |
2076 | (b'', b'index', False, _(b'display index of the marker')), |
|
2076 | (b'', b'index', False, _(b'display index of the marker')), | |
2077 | (b'', b'delete', [], _(b'delete markers specified by indices')), |
|
2077 | (b'', b'delete', [], _(b'delete markers specified by indices')), | |
@@ -2115,7 +2115,7 b' def debugobsolete(ui, repo, precursor=No' | |||||
2115 |
|
2115 | |||
2116 | if repo.currenttransaction(): |
|
2116 | if repo.currenttransaction(): | |
2117 | raise error.Abort( |
|
2117 | raise error.Abort( | |
2118 | _(b'cannot delete obsmarkers in the middle |
2118 | _(b'cannot delete obsmarkers in the middle of transaction.') | |
2119 | ) |
|
2119 | ) | |
2120 |
|
2120 | |||
2121 | with repo.lock(): |
|
2121 | with repo.lock(): | |
@@ -2949,7 +2949,7 b' def debugrevlogindex(ui, repo, file_=Non' | |||||
2949 | if format == 0: |
|
2949 | if format == 0: | |
2950 | if ui.verbose: |
|
2950 | if ui.verbose: | |
2951 | ui.writenoi18n( |
|
2951 | ui.writenoi18n( | |
2952 |
2952 | b" rev offset length linkrev %s %s p2\n" | |
2953 | % (b"nodeid".ljust(idlen), b"p1".ljust(idlen)) |
|
2953 | % (b"nodeid".ljust(idlen), b"p1".ljust(idlen)) | |
2954 | ) |
|
2954 | ) | |
2955 | else: |
|
2955 | else: | |
@@ -3096,7 +3096,7 b' def debugrevspec(ui, repo, expr, **opts)' | |||||
3096 | stages = stages[:-1] |
|
3096 | stages = stages[:-1] | |
3097 | if opts[b'verify_optimized'] and opts[b'no_optimized']: |
|
3097 | if opts[b'verify_optimized'] and opts[b'no_optimized']: | |
3098 | raise error.Abort( |
|
3098 | raise error.Abort( | |
3099 | _(b'cannot use --verify-optimized with |
3099 | _(b'cannot use --verify-optimized with --no-optimized') | |
3100 | ) |
|
3100 | ) | |
3101 | stagenames = set(n for n, f in stages) |
|
3101 | stagenames = set(n for n, f in stages) | |
3102 |
|
3102 | |||
@@ -3276,7 +3276,7 b' def debugssl(ui, repo, source=None, **op' | |||||
3276 | ''' |
|
3276 | ''' | |
3277 | if not pycompat.iswindows: |
|
3277 | if not pycompat.iswindows: | |
3278 | raise error.Abort( |
|
3278 | raise error.Abort( | |
3279 | _(b'certificate chain building is only possible on |
3279 | _(b'certificate chain building is only possible on Windows') | |
3280 | ) |
|
3280 | ) | |
3281 |
|
3281 | |||
3282 | if not source: |
|
3282 | if not source: | |
@@ -3422,7 +3422,7 b' def debugtemplate(ui, repo, tmpl, **opts' | |||||
3422 | if opts[r'rev']: |
|
3422 | if opts[r'rev']: | |
3423 | if repo is None: |
|
3423 | if repo is None: | |
3424 | raise error.RepoError( |
|
3424 | raise error.RepoError( | |
3425 | _(b'there is no Mercurial repository here |
3425 | _(b'there is no Mercurial repository here (.hg not found)') | |
3426 | ) |
|
3426 | ) | |
3427 | revs = scmutil.revrange(repo, opts[r'rev']) |
|
3427 | revs = scmutil.revrange(repo, opts[r'rev']) | |
3428 |
|
3428 | |||
@@ -3882,9 +3882,7 b' def debugwireproto(ui, repo, path=None, ' | |||||
3882 | ) |
|
3882 | ) | |
3883 |
|
3883 | |||
3884 | if path and opts[b'localssh']: |
|
3884 | if path and opts[b'localssh']: | |
3885 | raise error.Abort( |
3885 | raise error.Abort(_(b'cannot specify --localssh with an explicit path')) | |
3886 | _(b'cannot specify --localssh with an explicit ' b'path') |
3887 | ) |
3888 |
3886 | |||
3889 | if ui.interactive(): |
|
3887 | if ui.interactive(): | |
3890 | ui.write(_(b'(waiting for commands on stdin)\n')) |
|
3888 | ui.write(_(b'(waiting for commands on stdin)\n')) | |
@@ -4153,7 +4151,7 b' def debugwireproto(ui, repo, path=None, ' | |||||
4153 | elif action.startswith(b'httprequest '): |
|
4151 | elif action.startswith(b'httprequest '): | |
4154 | if not opener: |
|
4152 | if not opener: | |
4155 | raise error.Abort( |
|
4153 | raise error.Abort( | |
4156 | _(b'cannot use httprequest without an HTTP |
4154 | _(b'cannot use httprequest without an HTTP peer') | |
4157 | ) |
|
4155 | ) | |
4158 |
|
4156 | |||
4159 | request = action.split(b' ', 2) |
|
4157 | request = action.split(b' ', 2) |
@@ -129,9 +129,9 b' def _destupdatebranchfallback(repo, clea' | |||||
129 | heads = repo.branchheads(currentbranch, closed=True) |
|
129 | heads = repo.branchheads(currentbranch, closed=True) | |
130 | assert heads, b"any branch has at least one head" |
|
130 | assert heads, b"any branch has at least one head" | |
131 | node = repo.revs(b'max(.::(%ln))', heads).first() |
|
131 | node = repo.revs(b'max(.::(%ln))', heads).first() | |
132 | assert |
132 | assert ( | |
133 | b"any revision has at least " b"one descendant branch head" |
133 | node is not None | |
134 | ) |
134 | ), b"any revision has at least one descendant branch head" | |
135 | if bookmarks.isactivewdirparent(repo): |
|
135 | if bookmarks.isactivewdirparent(repo): | |
136 | movemark = repo[b'.'].node() |
|
136 | movemark = repo[b'.'].node() | |
137 | else: |
|
137 | else: |
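The destutil hunk above rewraps an assert so the parentheses enclose the condition while the joined message follows the closing parenthesis. A short illustrative Python sketch of why that distinction matters; the function names are hypothetical and not part of the destutil code:

    def check(node):
        # condition wrapped in parentheses, message on the closing line:
        # equivalent to a plain `assert node is not None, b"..."`
        assert (
            node is not None
        ), b"any revision has at least one descendant branch head"

    def check_broken(node):
        # pitfall the rewrapping has to avoid: parenthesizing condition AND
        # message together builds a non-empty tuple, which is always truthy,
        # so this assert can never fail (CPython warns about it at compile time)
        assert (node is not None, b"never evaluated"), "unreachable message"

    check(object())        # passes
    check_broken(None)     # silently passes even though node is None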
@@ -467,7 +467,7 b' class dirstate(object):' | |||||
467 | '''Mark as coming from the other parent, always dirty.''' |
|
467 | '''Mark as coming from the other parent, always dirty.''' | |
468 | if self._pl[1] == nullid: |
|
468 | if self._pl[1] == nullid: | |
469 | raise error.Abort( |
|
469 | raise error.Abort( | |
470 | _(b"setting %r to other parent |
470 | _(b"setting %r to other parent only allowed in merges") % f | |
471 | ) |
|
471 | ) | |
472 | if f in self and self[f] == b'n': |
|
472 | if f in self and self[f] == b'n': | |
473 | # merge-like |
|
473 | # merge-like | |
@@ -1470,7 +1470,7 b' class dirstatemap(object):' | |||||
1470 | if self._pendingmode is not None and self._pendingmode != mode: |
|
1470 | if self._pendingmode is not None and self._pendingmode != mode: | |
1471 | fp.close() |
|
1471 | fp.close() | |
1472 | raise error.Abort( |
|
1472 | raise error.Abort( | |
1473 | _(b'working directory state may be |
1473 | _(b'working directory state may be changed parallelly') | |
1474 | ) |
|
1474 | ) | |
1475 | self._pendingmode = mode |
|
1475 | self._pendingmode = mode | |
1476 | return fp |
|
1476 | return fp | |
@@ -1494,7 +1494,7 b' class dirstatemap(object):' | |||||
1494 | self._parents = (nullid, nullid) |
|
1494 | self._parents = (nullid, nullid) | |
1495 | else: |
|
1495 | else: | |
1496 | raise error.Abort( |
|
1496 | raise error.Abort( | |
1497 | _(b'working directory state appears |
1497 | _(b'working directory state appears damaged!') | |
1498 | ) |
|
1498 | ) | |
1499 |
|
1499 | |||
1500 | return self._parents |
|
1500 | return self._parents | |
@@ -1671,7 +1671,7 b' if rustmod is not None:' | |||||
1671 | if self._pendingmode is not None and self._pendingmode != mode: |
|
1671 | if self._pendingmode is not None and self._pendingmode != mode: | |
1672 | fp.close() |
|
1672 | fp.close() | |
1673 | raise error.Abort( |
|
1673 | raise error.Abort( | |
1674 | _(b'working directory state may be |
1674 | _(b'working directory state may be changed parallelly') | |
1675 | ) |
|
1675 | ) | |
1676 | self._pendingmode = mode |
|
1676 | self._pendingmode = mode | |
1677 | return fp |
|
1677 | return fp | |
@@ -1697,7 +1697,7 b' if rustmod is not None:' | |||||
1697 | self._parents = self._rustmap.parents(st) |
|
1697 | self._parents = self._rustmap.parents(st) | |
1698 | except ValueError: |
|
1698 | except ValueError: | |
1699 | raise error.Abort( |
|
1699 | raise error.Abort( | |
1700 | _(b'working directory state appears |
1700 | _(b'working directory state appears damaged!') | |
1701 | ) |
|
1701 | ) | |
1702 |
|
1702 | |||
1703 | return self._parents |
|
1703 | return self._parents |
@@ -412,11 +412,11 b' def checkheads(pushop):' | |||||
412 | heads = scmutil.nodesummaries(repo, unsyncedheads) |
|
412 | heads = scmutil.nodesummaries(repo, unsyncedheads) | |
413 | if heads is None: |
|
413 | if heads is None: | |
414 | repo.ui.status( |
|
414 | repo.ui.status( | |
415 | _(b"remote has heads that are |
415 | _(b"remote has heads that are not known locally\n") | |
416 | ) |
|
416 | ) | |
417 | elif branch is None: |
|
417 | elif branch is None: | |
418 | repo.ui.status( |
|
418 | repo.ui.status( | |
419 | _(b"remote has heads that are |
419 | _(b"remote has heads that are not known locally: %s\n") | |
420 | % heads |
|
420 | % heads | |
421 | ) |
|
421 | ) | |
422 | else: |
|
422 | else: | |
@@ -447,7 +447,7 b' def checkheads(pushop):' | |||||
447 | if errormsg is None: |
|
447 | if errormsg is None: | |
448 | if branch not in (b'default', None): |
|
448 | if branch not in (b'default', None): | |
449 | errormsg = _( |
|
449 | errormsg = _( | |
450 | b"push creates new remote head %s |
450 | b"push creates new remote head %s on branch '%s'!" | |
451 | ) % (short(dhs[0]), branch) |
|
451 | ) % (short(dhs[0]), branch) | |
452 | elif repo[dhs[0]].bookmarks(): |
|
452 | elif repo[dhs[0]].bookmarks(): | |
453 | errormsg = _( |
|
453 | errormsg = _( |
@@ -631,7 +631,7 b' class cmdalias(object):' | |||||
631 | continue |
|
631 | continue | |
632 | if not encoding.isasciistr(v): |
|
632 | if not encoding.isasciistr(v): | |
633 | self.badalias = _( |
|
633 | self.badalias = _( | |
634 | b"non-ASCII character in alias definition |
634 | b"non-ASCII character in alias definition '%s:%s'" | |
635 | ) % (name, k) |
|
635 | ) % (name, k) | |
636 | return |
|
636 | return | |
637 | cfg[k] = v |
|
637 | cfg[k] = v |
@@ -319,7 +319,7 b' def getbundlespec(ui, fh):' | |||||
319 | b'a known bundlespec' |
|
319 | b'a known bundlespec' | |
320 | ) |
|
320 | ) | |
321 | % version, |
|
321 | % version, | |
322 | hint=_(b'try upgrading your Mercurial |
322 | hint=_(b'try upgrading your Mercurial client'), | |
323 | ) |
|
323 | ) | |
324 | elif part.type == b'stream2' and version is None: |
|
324 | elif part.type == b'stream2' and version is None: | |
325 | # A stream2 part requires to be part of a v2 bundle |
|
325 | # A stream2 part requires to be part of a v2 bundle | |
@@ -330,7 +330,7 b' def getbundlespec(ui, fh):' | |||||
330 |
|
330 | |||
331 | if not version: |
|
331 | if not version: | |
332 | raise error.Abort( |
|
332 | raise error.Abort( | |
333 | _(b'could not identify changegroup version in |
333 | _(b'could not identify changegroup version in bundle') | |
334 | ) |
|
334 | ) | |
335 |
|
335 | |||
336 | return b'%s-%s' % (comp, version) |
|
336 | return b'%s-%s' % (comp, version) | |
@@ -383,7 +383,7 b' def _checkpublish(pushop):' | |||||
383 | ) |
|
383 | ) | |
384 | elif behavior == b'confirm': |
|
384 | elif behavior == b'confirm': | |
385 | if ui.promptchoice( |
|
385 | if ui.promptchoice( | |
386 | _(b'push and publish %i changesets (yn)? |
386 | _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No') | |
387 | % len(published) |
|
387 | % len(published) | |
388 | ): |
|
388 | ): | |
389 | raise error.Abort(_(b'user quit')) |
|
389 | raise error.Abort(_(b'user quit')) | |
@@ -1166,8 +1166,7 b' def _abortonsecretctx(pushop, node, b):' | |||||
1166 | """abort if a given bookmark points to a secret changeset""" |
|
1166 | """abort if a given bookmark points to a secret changeset""" | |
1167 | if node and pushop.repo[node].phase() == phases.secret: |
|
1167 | if node and pushop.repo[node].phase() == phases.secret: | |
1168 | raise error.Abort( |
|
1168 | raise error.Abort( | |
1169 | _(b'cannot push bookmark %s as it points to a secret' b |
1169 | _(b'cannot push bookmark %s as it points to a secret changeset') % b | |
1170 | % b |
1171 | ) |
1170 | ) | |
1172 |
|
1171 | |||
1173 |
|
1172 | |||
@@ -2696,7 +2695,7 b' def check_heads(repo, their_heads, conte' | |||||
2696 | # someone else committed/pushed/unbundled while we |
|
2695 | # someone else committed/pushed/unbundled while we | |
2697 | # were transferring data |
|
2696 | # were transferring data | |
2698 | raise error.PushRaced( |
|
2697 | raise error.PushRaced( | |
2699 | b'repository changed while %s - |
2698 | b'repository changed while %s - please try again' % context | |
2700 | ) |
|
2699 | ) | |
2701 |
|
2700 | |||
2702 |
|
2701 | |||
@@ -2842,7 +2841,7 b' def _maybeapplyclonebundle(pullop):' | |||||
2842 | ) |
|
2841 | ) | |
2843 | ) |
|
2842 | ) | |
2844 | repo.ui.warn( |
|
2843 | repo.ui.warn( | |
2845 | _(b'(you may want to report this to the server |
2844 | _(b'(you may want to report this to the server operator)\n') | |
2846 | ) |
|
2845 | ) | |
2847 | return |
|
2846 | return | |
2848 |
|
2847 |
@@ -219,7 +219,7 b' def _fetchrawstorefiles(repo, remote):' | |||||
219 | vfs = repo.svfs |
|
219 | vfs = repo.svfs | |
220 | else: |
|
220 | else: | |
221 | raise error.Abort( |
|
221 | raise error.Abort( | |
222 | _(b'invalid location for raw file data: |
222 | _(b'invalid location for raw file data: %s') | |
223 | % filemeta[b'location'] |
|
223 | % filemeta[b'location'] | |
224 | ) |
|
224 | ) | |
225 |
|
225 |
@@ -315,7 +315,7 b' def _iprompt(repo, mynode, orig, fcd, fc' | |||||
315 | # conflicts. |
|
315 | # conflicts. | |
316 | if fcd.changectx().isinmemory(): |
|
316 | if fcd.changectx().isinmemory(): | |
317 | raise error.InMemoryMergeConflictsError( |
|
317 | raise error.InMemoryMergeConflictsError( | |
318 | b'in-memory merge does not |
318 | b'in-memory merge does not support file conflicts' | |
319 | ) |
|
319 | ) | |
320 |
|
320 | |||
321 | prompts = partextras(labels) |
|
321 | prompts = partextras(labels) | |
@@ -415,7 +415,7 b' def _premerge(repo, fcd, fco, fca, toolc' | |||||
415 | if premerge not in validkeep: |
|
415 | if premerge not in validkeep: | |
416 | _valid = b', '.join([b"'" + v + b"'" for v in validkeep]) |
|
416 | _valid = b', '.join([b"'" + v + b"'" for v in validkeep]) | |
417 | raise error.ConfigError( |
|
417 | raise error.ConfigError( | |
418 | _(b"%s.premerge not valid |
418 | _(b"%s.premerge not valid ('%s' is neither boolean nor %s)") | |
419 | % (tool, premerge, _valid) |
|
419 | % (tool, premerge, _valid) | |
420 | ) |
|
420 | ) | |
421 |
|
421 | |||
@@ -440,7 +440,7 b' def _mergecheck(repo, mynode, orig, fcd,' | |||||
440 | uipathfn = scmutil.getuipathfn(repo) |
|
440 | uipathfn = scmutil.getuipathfn(repo) | |
441 | if symlink: |
|
441 | if symlink: | |
442 | repo.ui.warn( |
|
442 | repo.ui.warn( | |
443 | _(b'warning: internal %s cannot merge symlinks |
443 | _(b'warning: internal %s cannot merge symlinks for %s\n') | |
444 | % (tool, uipathfn(fcd.path())) |
|
444 | % (tool, uipathfn(fcd.path())) | |
445 | ) |
|
445 | ) | |
446 | return False |
|
446 | return False | |
@@ -606,7 +606,7 b' def _idump(repo, mynode, orig, fcd, fco,' | |||||
606 |
|
606 | |||
607 | if isinstance(fcd, context.overlayworkingfilectx): |
|
607 | if isinstance(fcd, context.overlayworkingfilectx): | |
608 | raise error.InMemoryMergeConflictsError( |
|
608 | raise error.InMemoryMergeConflictsError( | |
609 | b'in-memory merge does not |
609 | b'in-memory merge does not support the :dump tool.' | |
610 | ) |
|
610 | ) | |
611 |
|
611 | |||
612 | util.writefile(a + b".local", fcd.decodeddata()) |
|
612 | util.writefile(a + b".local", fcd.decodeddata()) | |
@@ -635,7 +635,7 b' def _xmergeimm(repo, mynode, orig, fcd, ' | |||||
635 | # directory and tell the user how to get it is my best idea, but it's |
|
635 | # directory and tell the user how to get it is my best idea, but it's | |
636 | # clunky.) |
|
636 | # clunky.) | |
637 | raise error.InMemoryMergeConflictsError( |
|
637 | raise error.InMemoryMergeConflictsError( | |
638 | b'in-memory merge does not support |
638 | b'in-memory merge does not support external merge tools' | |
639 | ) |
|
639 | ) | |
640 |
|
640 | |||
641 |
|
641 | |||
@@ -698,7 +698,7 b' def _xmerge(repo, mynode, orig, fcd, fco' | |||||
698 | uipathfn = scmutil.getuipathfn(repo) |
|
698 | uipathfn = scmutil.getuipathfn(repo) | |
699 | if fcd.isabsent() or fco.isabsent(): |
|
699 | if fcd.isabsent() or fco.isabsent(): | |
700 | repo.ui.warn( |
|
700 | repo.ui.warn( | |
701 | _(b'warning: %s cannot merge change/delete conflict |
701 | _(b'warning: %s cannot merge change/delete conflict for %s\n') | |
702 | % (tool, uipathfn(fcd.path())) |
|
702 | % (tool, uipathfn(fcd.path())) | |
703 | ) |
|
703 | ) | |
704 | return False, 1, None |
|
704 | return False, 1, None | |
@@ -1064,7 +1064,7 b' def _filemerge(premerge, repo, wctx, myn' | |||||
1064 | if onfailure: |
|
1064 | if onfailure: | |
1065 | if wctx.isinmemory(): |
|
1065 | if wctx.isinmemory(): | |
1066 | raise error.InMemoryMergeConflictsError( |
|
1066 | raise error.InMemoryMergeConflictsError( | |
1067 | b'in-memory merge does |
1067 | b'in-memory merge does not support merge conflicts' | |
1068 | ) |
|
1068 | ) | |
1069 | ui.warn(onfailure % fduipath) |
|
1069 | ui.warn(onfailure % fduipath) | |
1070 | return True, 1, False |
|
1070 | return True, 1, False | |
@@ -1150,7 +1150,7 b' def _haltmerge():' | |||||
1150 | def _onfilemergefailure(ui): |
|
1150 | def _onfilemergefailure(ui): | |
1151 | action = ui.config(b'merge', b'on-failure') |
|
1151 | action = ui.config(b'merge', b'on-failure') | |
1152 | if action == b'prompt': |
|
1152 | if action == b'prompt': | |
1153 | msg = _(b'continue merge operation (yn)? |
1153 | msg = _(b'continue merge operation (yn)?$$ &Yes $$ &No') | |
1154 | if ui.promptchoice(msg, 0) == 1: |
|
1154 | if ui.promptchoice(msg, 0) == 1: | |
1155 | _haltmerge() |
|
1155 | _haltmerge() | |
1156 | if action == b'halt': |
|
1156 | if action == b'halt': | |
@@ -1180,7 +1180,7 b' def _check(repo, r, ui, tool, fcd, files' | |||||
1180 | if b'prompt' in _toollist(ui, tool, b"check"): |
|
1180 | if b'prompt' in _toollist(ui, tool, b"check"): | |
1181 | checked = True |
|
1181 | checked = True | |
1182 | if ui.promptchoice( |
|
1182 | if ui.promptchoice( | |
1183 | _(b"was merge of '%s' successful (yn)? |
1183 | _(b"was merge of '%s' successful (yn)?$$ &Yes $$ &No") | |
1184 | % uipathfn(fd), |
|
1184 | % uipathfn(fd), | |
1185 | 1, |
|
1185 | 1, | |
1186 | ): |
|
1186 | ): |
@@ -860,7 +860,7 b' def help_(' | |||||
860 | ) |
|
860 | ) | |
861 | if name == b'shortlist': |
|
861 | if name == b'shortlist': | |
862 | rst.append( |
|
862 | rst.append( | |
863 | _(b"\n(use 'hg help' for the full list |
863 | _(b"\n(use 'hg help' for the full list of commands)\n") | |
864 | ) |
|
864 | ) | |
865 | else: |
|
865 | else: | |
866 | if name == b'shortlist': |
|
866 | if name == b'shortlist': | |
@@ -872,7 +872,7 b' def help_(' | |||||
872 | ) |
|
872 | ) | |
873 | elif name and not full: |
|
873 | elif name and not full: | |
874 | rst.append( |
|
874 | rst.append( | |
875 | _(b"\n(use 'hg help %s' to show the full help |
875 | _(b"\n(use 'hg help %s' to show the full help text)\n") | |
876 | % name |
|
876 | % name | |
877 | ) |
|
877 | ) | |
878 | elif name and syns and name in syns.keys(): |
|
878 | elif name and syns and name in syns.keys(): | |
@@ -929,7 +929,7 b' def help_(' | |||||
929 | try: |
|
929 | try: | |
930 | cmdutil.findcmd(name, commands.table) |
|
930 | cmdutil.findcmd(name, commands.table) | |
931 | rst.append( |
|
931 | rst.append( | |
932 | _(b"\nuse 'hg help -c %s' to see help for |
932 | _(b"\nuse 'hg help -c %s' to see help for the %s command\n") | |
933 | % (name, name) |
|
933 | % (name, name) | |
934 | ) |
|
934 | ) | |
935 | except error.UnknownCommand: |
|
935 | except error.UnknownCommand: | |
@@ -985,7 +985,7 b' def help_(' | |||||
985 | doc = doc.splitlines()[0] |
|
985 | doc = doc.splitlines()[0] | |
986 |
|
986 | |||
987 | rst = listexts( |
|
987 | rst = listexts( | |
988 | _(b"'%s' is provided by the following |
988 | _(b"'%s' is provided by the following extension:") % cmd, | |
989 | {ext: doc}, |
|
989 | {ext: doc}, | |
990 | indent=4, |
|
990 | indent=4, | |
991 | showdeprecated=True, |
|
991 | showdeprecated=True, |
@@ -379,7 +379,7 b' def postshare(sourcerepo, destrepo, defa' | |||||
379 | """ |
|
379 | """ | |
380 | default = defaultpath or sourcerepo.ui.config(b'paths', b'default') |
|
380 | default = defaultpath or sourcerepo.ui.config(b'paths', b'default') | |
381 | if default: |
|
381 | if default: | |
382 | template = b'[paths]\n |
382 | template = b'[paths]\ndefault = %s\n' | |
383 | destrepo.vfs.write(b'hgrc', util.tonativeeol(template % default)) |
|
383 | destrepo.vfs.write(b'hgrc', util.tonativeeol(template % default)) | |
384 | if repositorymod.NARROW_REQUIREMENT in sourcerepo.requirements: |
|
384 | if repositorymod.NARROW_REQUIREMENT in sourcerepo.requirements: | |
385 | with destrepo.wlock(): |
|
385 | with destrepo.wlock(): | |
@@ -1182,9 +1182,7 b' def abortmerge(ui, repo):' | |||||
1182 | # there were no conficts, mergestate was not stored |
|
1182 | # there were no conficts, mergestate was not stored | |
1183 | node = repo[b'.'].hex() |
|
1183 | node = repo[b'.'].hex() | |
1184 |
|
1184 | |||
1185 | repo.ui.status( |
1185 | repo.ui.status(_(b"aborting the merge, updating back to %s\n") % node[:12]) | |
1186 | _(b"aborting the merge, updating back to" b" %s\n") % node[:12] |
1187 | ) |
1188 | stats = mergemod.update(repo, node, branchmerge=False, force=True) |
|
1186 | stats = mergemod.update(repo, node, branchmerge=False, force=True) | |
1189 | _showstats(repo, stats) |
|
1187 | _showstats(repo, stats) | |
1190 | return stats.unresolvedcount > 0 |
|
1188 | return stats.unresolvedcount > 0 |
@@ -119,6 +119,6 b' def createapp(baseui, repo, webconf):' | |||||
119 | else: |
|
119 | else: | |
120 | if not repo: |
|
120 | if not repo: | |
121 | raise error.RepoError( |
|
121 | raise error.RepoError( | |
122 | _(b"there is no Mercurial repository |
122 | _(b"there is no Mercurial repository here (.hg not found)") | |
123 | ) |
|
123 | ) | |
124 | return hgweb_mod.hgweb(repo, baseui=baseui) |
|
124 | return hgweb_mod.hgweb(repo, baseui=baseui) |
@@ -516,7 +516,7 b' class wsgiresponse(object):' | |||||
516 |
|
516 | |||
517 | if self._bodygen is not None or self._bodywillwrite: |
|
517 | if self._bodygen is not None or self._bodywillwrite: | |
518 | raise error.ProgrammingError( |
|
518 | raise error.ProgrammingError( | |
519 | b"must use setbodybytes('') with |
519 | b"must use setbodybytes('') with 304 responses" | |
520 | ) |
|
520 | ) | |
521 |
|
521 | |||
522 | # Various HTTP clients (notably httplib) won't read the HTTP response |
|
522 | # Various HTTP clients (notably httplib) won't read the HTTP response |
@@ -242,7 +242,7 b' class _httprequesthandler(httpservermod.' | |||||
242 | def send_headers(self): |
|
242 | def send_headers(self): | |
243 | if not self.saved_status: |
|
243 | if not self.saved_status: | |
244 | raise AssertionError( |
|
244 | raise AssertionError( | |
245 | b"Sending headers before |
245 | b"Sending headers before start_response() called" | |
246 | ) |
|
246 | ) | |
247 | saved_status = self.saved_status.split(None, 1) |
|
247 | saved_status = self.saved_status.split(None, 1) | |
248 | saved_status[0] = int(saved_status[0]) |
|
248 | saved_status[0] = int(saved_status[0]) |
@@ -1295,7 +1295,7 b' def archive(web):' | |||||
1295 | web.res.setbodywillwrite() |
|
1295 | web.res.setbodywillwrite() | |
1296 | if list(web.res.sendresponse()): |
|
1296 | if list(web.res.sendresponse()): | |
1297 | raise error.ProgrammingError( |
|
1297 | raise error.ProgrammingError( | |
1298 | b'sendresponse() should not emit data |
1298 | b'sendresponse() should not emit data if writing later' | |
1299 | ) |
|
1299 | ) | |
1300 |
|
1300 | |||
1301 | bodyfh = web.res.getbodyfile() |
|
1301 | bodyfh = web.res.getbodyfile() |
@@ -117,7 +117,7 b' def pythonhook(ui, repo, htype, hname, f' | |||||
117 | ui.warn(_(b'error: %s hook failed: %s\n') % (hname, exc.args[0])) |
|
117 | ui.warn(_(b'error: %s hook failed: %s\n') % (hname, exc.args[0])) | |
118 | else: |
|
118 | else: | |
119 | ui.warn( |
|
119 | ui.warn( | |
120 | _(b'error: %s hook raised an exception: |
120 | _(b'error: %s hook raised an exception: %s\n') | |
121 | % (hname, stringutil.forcebytestr(exc)) |
|
121 | % (hname, stringutil.forcebytestr(exc)) | |
122 | ) |
|
122 | ) | |
123 | if throw: |
|
123 | if throw: |
@@ -383,14 +383,13 b' def parsev1commandresponse(' | |||||
383 | return respurl, proto, resp |
|
383 | return respurl, proto, resp | |
384 | else: |
|
384 | else: | |
385 | raise error.RepoError( |
|
385 | raise error.RepoError( | |
386 | _(b'unexpected CBOR response from |
386 | _(b'unexpected CBOR response from server') | |
387 | ) |
|
387 | ) | |
388 |
|
388 | |||
389 | version_info = tuple([int(n) for n in subtype.split(b'.')]) |
|
389 | version_info = tuple([int(n) for n in subtype.split(b'.')]) | |
390 | except ValueError: |
|
390 | except ValueError: | |
391 | raise error.RepoError( |
|
391 | raise error.RepoError( | |
392 | _(b"'%s' sent a broken Content-Type |
392 | _(b"'%s' sent a broken Content-Type header (%s)") % (safeurl, proto) | |
393 | % (safeurl, proto) |
394 | ) |
393 | ) | |
395 |
|
394 | |||
396 | # TODO consider switching to a decompression reader that uses |
|
395 | # TODO consider switching to a decompression reader that uses | |
@@ -685,12 +684,12 b' class httpv2executor(object):' | |||||
685 | def callcommand(self, command, args): |
|
684 | def callcommand(self, command, args): | |
686 | if self._sent: |
|
685 | if self._sent: | |
687 | raise error.ProgrammingError( |
|
686 | raise error.ProgrammingError( | |
688 | b'callcommand() cannot be used after |
687 | b'callcommand() cannot be used after commands are sent' | |
689 | ) |
|
688 | ) | |
690 |
|
689 | |||
691 | if self._closed: |
|
690 | if self._closed: | |
692 | raise error.ProgrammingError( |
|
691 | raise error.ProgrammingError( | |
693 | b'callcommand() cannot be used after |
692 | b'callcommand() cannot be used after close()' | |
694 | ) |
|
693 | ) | |
695 |
|
694 | |||
696 | # The service advertises which commands are available. So if we attempt |
|
695 | # The service advertises which commands are available. So if we attempt | |
@@ -763,7 +762,7 b' class httpv2executor(object):' | |||||
763 |
|
762 | |||
764 | if len(permissions) > 1: |
|
763 | if len(permissions) > 1: | |
765 | raise error.RepoError( |
|
764 | raise error.RepoError( | |
766 | _(b'cannot make request requiring multiple |
765 | _(b'cannot make request requiring multiple permissions: %s') | |
767 | % _(b', ').join(sorted(permissions)) |
|
766 | % _(b', ').join(sorted(permissions)) | |
768 | ) |
|
767 | ) | |
769 |
|
768 | |||
@@ -1101,7 +1100,7 b' def instance(ui, path, create, intents=N' | |||||
1101 | try: |
|
1100 | try: | |
1102 | if path.startswith(b'https:') and not urlmod.has_https: |
|
1101 | if path.startswith(b'https:') and not urlmod.has_https: | |
1103 | raise error.Abort( |
|
1102 | raise error.Abort( | |
1104 | _(b'Python support for SSL and HTTPS |
1103 | _(b'Python support for SSL and HTTPS is not installed') | |
1105 | ) |
|
1104 | ) | |
1106 |
|
1105 | |||
1107 | inst = makepeer(ui, path) |
|
1106 | inst = makepeer(ui, path) |
@@ -296,7 +296,7 b' class KeepAliveHandler(object):' | |||||
296 | # a DIFFERENT exception |
|
296 | # a DIFFERENT exception | |
297 | if DEBUG: |
|
297 | if DEBUG: | |
298 | DEBUG.error( |
|
298 | DEBUG.error( | |
299 | b"unexpected exception - closing |
299 | b"unexpected exception - closing connection to %s (%d)", | |
300 | host, |
|
300 | host, | |
301 | id(h), |
|
301 | id(h), | |
302 | ) |
|
302 | ) |
@@ -223,12 +223,12 b' class localcommandexecutor(object):' | |||||
223 | def callcommand(self, command, args): |
|
223 | def callcommand(self, command, args): | |
224 | if self._sent: |
|
224 | if self._sent: | |
225 | raise error.ProgrammingError( |
|
225 | raise error.ProgrammingError( | |
226 | b'callcommand() cannot be used after |
226 | b'callcommand() cannot be used after sendcommands()' | |
227 | ) |
|
227 | ) | |
228 |
|
228 | |||
229 | if self._closed: |
|
229 | if self._closed: | |
230 | raise error.ProgrammingError( |
|
230 | raise error.ProgrammingError( | |
231 | b'callcommand() cannot be used after |
231 | b'callcommand() cannot be used after close()' | |
232 | ) |
|
232 | ) | |
233 |
|
233 | |||
234 | # We don't need to support anything fancy. Just call the named |
|
234 | # We don't need to support anything fancy. Just call the named | |
@@ -343,9 +343,7 b' class localpeer(repository.peer):' | |||||
343 | return self._repo.pushkey(namespace, key, old, new) |
|
343 | return self._repo.pushkey(namespace, key, old, new) | |
344 |
|
344 | |||
345 | def stream_out(self): |
|
345 | def stream_out(self): | |
346 | raise error.Abort( |
346 | raise error.Abort(_(b'cannot perform stream clone against local peer')) | |
347 | _(b'cannot perform stream clone against local ' b'peer') |
348 | ) |
349 |
347 | |||
350 | def unbundle(self, bundle, heads, url): |
|
348 | def unbundle(self, bundle, heads, url): | |
351 | """apply a bundle on a repo |
|
349 | """apply a bundle on a repo | |
@@ -568,7 +566,7 b' def makelocalrepository(baseui, path, in' | |||||
568 |
|
566 | |||
569 | if not sharedvfs.exists(): |
|
567 | if not sharedvfs.exists(): | |
570 | raise error.RepoError( |
|
568 | raise error.RepoError( | |
571 | _(b'.hg/sharedpath points to nonexistent |
569 | _(b'.hg/sharedpath points to nonexistent directory %s') | |
572 | % sharedvfs.base |
|
570 | % sharedvfs.base | |
573 | ) |
|
571 | ) | |
574 |
|
572 | |||
@@ -1453,7 +1451,7 b' class localrepository(object):' | |||||
1453 | if not self._dirstatevalidatewarned: |
|
1451 | if not self._dirstatevalidatewarned: | |
1454 | self._dirstatevalidatewarned = True |
|
1452 | self._dirstatevalidatewarned = True | |
1455 | self.ui.warn( |
|
1453 | self.ui.warn( | |
1456 | _(b"warning: ignoring unknown |
1454 | _(b"warning: ignoring unknown working parent %s!\n") | |
1457 | % short(node) |
|
1455 | % short(node) | |
1458 | ) |
|
1456 | ) | |
1459 | return nullid |
|
1457 | return nullid | |
@@ -2302,7 +2300,7 b' class localrepository(object):' | |||||
2302 | ) % (oldtip, desc, detail) |
|
2300 | ) % (oldtip, desc, detail) | |
2303 | else: |
|
2301 | else: | |
2304 | msg = _( |
|
2302 | msg = _( | |
2305 | b'repository tip rolled back to revision %d |
2303 | b'repository tip rolled back to revision %d (undo %s)\n' | |
2306 | ) % (oldtip, desc) |
|
2304 | ) % (oldtip, desc) | |
2307 | except IOError: |
|
2305 | except IOError: | |
2308 | msg = _(b'rolling back unknown transaction\n') |
|
2306 | msg = _(b'rolling back unknown transaction\n') | |
@@ -2367,8 +2365,7 b' class localrepository(object):' | |||||
2367 | ) |
|
2365 | ) | |
2368 | else: |
|
2366 | else: | |
2369 | ui.status( |
|
2367 | ui.status( | |
2370 | _(b'working directory now based on |
2368 | _(b'working directory now based on revision %d\n') % parents | |
2371 | % parents |
2372 | ) |
2369 | ) | |
2373 | mergemod.mergestate.clean(self, self[b'.'].node()) |
|
2370 | mergemod.mergestate.clean(self, self[b'.'].node()) | |
2374 |
|
2371 | |||
@@ -3600,7 +3597,7 b' def createrepository(ui, path, createopt' | |||||
3600 |
|
3597 | |||
3601 | if not isinstance(unknownopts, dict): |
|
3598 | if not isinstance(unknownopts, dict): | |
3602 | raise error.ProgrammingError( |
|
3599 | raise error.ProgrammingError( | |
3603 | b'filterknowncreateopts() did not return |
3600 | b'filterknowncreateopts() did not return a dict' | |
3604 | ) |
|
3601 | ) | |
3605 |
|
3602 | |||
3606 | if unknownopts: |
|
3603 | if unknownopts: | |
@@ -3687,7 +3684,7 b' def poisonrepository(repo):' | |||||
3687 | return object.__getattribute__(self, item) |
|
3684 | return object.__getattribute__(self, item) | |
3688 |
|
3685 | |||
3689 | raise error.ProgrammingError( |
|
3686 | raise error.ProgrammingError( | |
3690 | b'repo instances should not be used |
3687 | b'repo instances should not be used after unshare' | |
3691 | ) |
|
3688 | ) | |
3692 |
|
3689 | |||
3693 | def close(self): |
|
3690 | def close(self): |
@@ -925,8 +925,7 b' def getlinerangerevs(repo, userrevs, opt' | |||||
925 | for fname, (fromline, toline) in _parselinerangeopt(repo, opts): |
|
925 | for fname, (fromline, toline) in _parselinerangeopt(repo, opts): | |
926 | if fname not in wctx: |
|
926 | if fname not in wctx: | |
927 | raise error.Abort( |
|
927 | raise error.Abort( | |
928 | _(b'cannot follow file not in parent |
928 | _(b'cannot follow file not in parent revision: "%s"') % fname | |
929 | % fname |
930 | ) |
929 | ) | |
931 | fctx = wctx.filectx(fname) |
|
930 | fctx = wctx.filectx(fname) | |
932 | for fctx, linerange in dagop.blockancestors(fctx, fromline, toline): |
|
931 | for fctx, linerange in dagop.blockancestors(fctx, fromline, toline): |
@@ -236,8 +236,7 b' def validateconfig(ui):' | |||||
236 | else: |
|
236 | else: | |
237 | if not procutil.findexe(method): |
|
237 | if not procutil.findexe(method): | |
238 | raise error.Abort( |
|
238 | raise error.Abort( | |
239 | _(b'%r specified as email transport, |
239 | _(b'%r specified as email transport, but not in PATH') % method | |
240 | % method |
241 | ) |
240 | ) | |
242 |
|
241 | |||
243 |
|
242 |
@@ -778,7 +778,7 b' class treemanifest(object):' | |||||
778 |
|
778 | |||
779 | def readsubtree(subdir, subm): |
|
779 | def readsubtree(subdir, subm): | |
780 | raise AssertionError( |
|
780 | raise AssertionError( | |
781 | b'treemanifest constructor only accepts |
781 | b'treemanifest constructor only accepts flat manifests' | |
782 | ) |
|
782 | ) | |
783 |
|
783 | |||
784 | self.parse(text, readsubtree) |
|
784 | self.parse(text, readsubtree) |
@@ -66,7 +66,7 b' def _expandsets(kindpats, ctx=None, list' | |||||
66 | if kind == b'set': |
|
66 | if kind == b'set': | |
67 | if ctx is None: |
|
67 | if ctx is None: | |
68 | raise error.ProgrammingError( |
|
68 | raise error.ProgrammingError( | |
69 | b"fileset expression with no |
69 | b"fileset expression with no context" | |
70 | ) |
|
70 | ) | |
71 | matchers.append(ctx.matchfileset(pat, badfn=badfn)) |
|
71 | matchers.append(ctx.matchfileset(pat, badfn=badfn)) | |
72 |
|
72 | |||
@@ -546,7 +546,7 b' class predicatematcher(basematcher):' | |||||
546 | def normalizerootdir(dir, funcname): |
|
546 | def normalizerootdir(dir, funcname): | |
547 | if dir == b'.': |
|
547 | if dir == b'.': | |
548 | util.nouideprecwarn( |
|
548 | util.nouideprecwarn( | |
549 | b"match.%s() no longer accepts |
549 | b"match.%s() no longer accepts '.', use '' instead." % funcname, | |
550 | b'5.1', |
|
550 | b'5.1', | |
551 | ) |
|
551 | ) | |
552 | return b'' |
|
552 | return b'' |
@@ -83,7 +83,7 b' class diffopts(object):' | |||||
83 | self.context = int(self.context) |
|
83 | self.context = int(self.context) | |
84 | except ValueError: |
|
84 | except ValueError: | |
85 | raise error.Abort( |
|
85 | raise error.Abort( | |
86 | _(b'diff context lines count must be |
86 | _(b'diff context lines count must be an integer, not %r') | |
87 | % pycompat.bytestr(self.context) |
|
87 | % pycompat.bytestr(self.context) | |
88 | ) |
|
88 | ) | |
89 |
|
89 |
@@ -760,7 +760,7 b' def _getcheckunknownconfig(repo, section' | |||||
760 | if config not in valid: |
|
760 | if config not in valid: | |
761 | validstr = b', '.join([b"'" + v + b"'" for v in valid]) |
|
761 | validstr = b', '.join([b"'" + v + b"'" for v in valid]) | |
762 | raise error.ConfigError( |
|
762 | raise error.ConfigError( | |
763 | _(b"%s.%s not valid |
763 | _(b"%s.%s not valid ('%s' is none of %s)") | |
764 | % (section, name, config, validstr) |
|
764 | % (section, name, config, validstr) | |
765 | ) |
|
765 | ) | |
766 | return config |
|
766 | return config | |
@@ -1048,7 +1048,7 b' def _checkcollision(repo, wmf, actions):' | |||||
1048 | if fold.startswith(foldprefix) and not f.startswith(unfoldprefix): |
|
1048 | if fold.startswith(foldprefix) and not f.startswith(unfoldprefix): | |
1049 | # the folded prefix matches but actual casing is different |
|
1049 | # the folded prefix matches but actual casing is different | |
1050 | raise error.Abort( |
|
1050 | raise error.Abort( | |
1051 | _(b"case-folding collision between |
1051 | _(b"case-folding collision between %s and directory of %s") | |
1052 | % (lastfull, f) |
|
1052 | % (lastfull, f) | |
1053 | ) |
|
1053 | ) | |
1054 | foldprefix = fold + b'/' |
|
1054 | foldprefix = fold + b'/' | |
@@ -1225,11 +1225,11 b' def _filternarrowactions(narrowmatch, br' | |||||
1225 | b'which is not yet supported' |
|
1225 | b'which is not yet supported' | |
1226 | ) |
|
1226 | ) | |
1227 | % f, |
|
1227 | % f, | |
1228 | hint=_(b'merging in the other direction |
1228 | hint=_(b'merging in the other direction may work'), | |
1229 | ) |
|
1229 | ) | |
1230 | else: |
|
1230 | else: | |
1231 | raise error.Abort( |
|
1231 | raise error.Abort( | |
1232 | _(b'conflict in file \'%s\' is outside |
1232 | _(b'conflict in file \'%s\' is outside narrow clone') % f | |
1233 | ) |
|
1233 | ) | |
1234 |
|
1234 | |||
1235 |
|
1235 | |||
@@ -1992,7 +1992,7 b' def applyupdates(' | |||||
1992 | if usemergedriver: |
|
1992 | if usemergedriver: | |
1993 | if wctx.isinmemory(): |
|
1993 | if wctx.isinmemory(): | |
1994 | raise error.InMemoryMergeConflictsError( |
|
1994 | raise error.InMemoryMergeConflictsError( | |
1995 | b"in-memory merge does not |
1995 | b"in-memory merge does not support mergedriver" | |
1996 | ) |
|
1996 | ) | |
1997 | ms.commit() |
|
1997 | ms.commit() | |
1998 | proceed = driverpreprocess(repo, ms, wctx, labels=labels) |
|
1998 | proceed = driverpreprocess(repo, ms, wctx, labels=labels) | |
@@ -2334,7 +2334,7 b' def update(' | |||||
2334 | if not mergeancestor and wc.branch() == p2.branch(): |
|
2334 | if not mergeancestor and wc.branch() == p2.branch(): | |
2335 | raise error.Abort( |
|
2335 | raise error.Abort( | |
2336 | _(b"nothing to merge"), |
|
2336 | _(b"nothing to merge"), | |
2337 | hint=_(b"use 'hg update' |
2337 | hint=_(b"use 'hg update' or check 'hg heads'"), | |
2338 | ) |
|
2338 | ) | |
2339 | if not force and (wc.files() or wc.deleted()): |
|
2339 | if not force and (wc.files() or wc.deleted()): | |
2340 | raise error.Abort( |
|
2340 | raise error.Abort( |
@@ -15,7 +15,7 b' from . import error' | |||||
15 | def checkunresolved(ms): |
|
15 | def checkunresolved(ms): | |
16 | if list(ms.unresolved()): |
|
16 | if list(ms.unresolved()): | |
17 | raise error.Abort( |
|
17 | raise error.Abort( | |
18 | _(b"unresolved merge conflicts |
18 | _(b"unresolved merge conflicts (see 'hg help resolve')") | |
19 | ) |
|
19 | ) | |
20 | if ms.mdstate() != b's' or list(ms.driverresolved()): |
|
20 | if ms.mdstate() != b's' or list(ms.driverresolved()): | |
21 | raise error.Abort( |
|
21 | raise error.Abort( |
@@ -110,7 +110,7 b' def validatepatterns(pats):' | |||||
110 | """ |
|
110 | """ | |
111 | if not isinstance(pats, set): |
|
111 | if not isinstance(pats, set): | |
112 | raise error.ProgrammingError( |
|
112 | raise error.ProgrammingError( | |
113 | b'narrow patterns should be a set; |
113 | b'narrow patterns should be a set; got %r' % pats | |
114 | ) |
|
114 | ) | |
115 |
|
115 | |||
116 | for pat in pats: |
|
116 | for pat in pats: |
@@ -667,7 +667,7 b' class obsstore(object):' | |||||
667 | Return the number of new marker.""" |
|
667 | Return the number of new marker.""" | |
668 | if self._readonly: |
|
668 | if self._readonly: | |
669 | raise error.Abort( |
|
669 | raise error.Abort( | |
670 | _(b'creating obsolete markers is not enabled on |
670 | _(b'creating obsolete markers is not enabled on this repo') | |
671 | ) |
|
671 | ) | |
672 | known = set() |
|
672 | known = set() | |
673 | getsuccessors = self.successors.get |
|
673 | getsuccessors = self.successors.get |
@@ -929,7 +929,7 b' filteredmsgtable = {' | |||||
929 | b"superseded": _(b"hidden revision '%s' was rewritten as: %s"), |
|
929 | b"superseded": _(b"hidden revision '%s' was rewritten as: %s"), | |
930 | b"superseded_split": _(b"hidden revision '%s' was split as: %s"), |
|
930 | b"superseded_split": _(b"hidden revision '%s' was split as: %s"), | |
931 | b"superseded_split_several": _( |
|
931 | b"superseded_split_several": _( | |
932 | b"hidden revision '%s' was split as: %s and |
932 | b"hidden revision '%s' was split as: %s and %d more" | |
933 | ), |
|
933 | ), | |
934 | } |
|
934 | } | |
935 |
|
935 |
@@ -163,12 +163,12 b' def buildargsdict(trees, funcname, argsp' | |||||
163 | ) |
|
163 | ) | |
164 | if kwstart < len(poskeys): |
|
164 | if kwstart < len(poskeys): | |
165 | raise error.ParseError( |
|
165 | raise error.ParseError( | |
166 | _(b"%(func)s takes at least %(nargs)d positional |
166 | _(b"%(func)s takes at least %(nargs)d positional arguments") | |
167 | % {b'func': funcname, b'nargs': len(poskeys)} |
|
167 | % {b'func': funcname, b'nargs': len(poskeys)} | |
168 | ) |
|
168 | ) | |
169 | if not varkey and kwstart > len(poskeys) + len(keys): |
|
169 | if not varkey and kwstart > len(poskeys) + len(keys): | |
170 | raise error.ParseError( |
|
170 | raise error.ParseError( | |
171 | _(b"%(func)s takes at most %(nargs)d positional |
171 | _(b"%(func)s takes at most %(nargs)d positional arguments") | |
172 | % {b'func': funcname, b'nargs': len(poskeys) + len(keys)} |
|
172 | % {b'func': funcname, b'nargs': len(poskeys) + len(keys)} | |
173 | ) |
|
173 | ) | |
174 | args = util.sortdict() |
|
174 | args = util.sortdict() | |
@@ -193,7 +193,7 b' def buildargsdict(trees, funcname, argsp' | |||||
193 | d = args |
|
193 | d = args | |
194 | elif not optkey: |
|
194 | elif not optkey: | |
195 | raise error.ParseError( |
|
195 | raise error.ParseError( | |
196 | _(b"%(func)s got an unexpected keyword |
196 | _(b"%(func)s got an unexpected keyword argument '%(key)s'") | |
197 | % {b'func': funcname, b'key': k} |
|
197 | % {b'func': funcname, b'key': k} | |
198 | ) |
|
198 | ) | |
199 | else: |
|
199 | else: | |
@@ -713,7 +713,7 b' class basealiasrules(object):' | |||||
713 | raise error.Abort(a.error) |
|
713 | raise error.Abort(a.error) | |
714 | if a in expanding: |
|
714 | if a in expanding: | |
715 | raise error.ParseError( |
|
715 | raise error.ParseError( | |
716 | _(b'infinite expansion of %(section)s |
716 | _(b'infinite expansion of %(section)s "%(name)s" detected') | |
717 | % {b'section': cls._section, b'name': a.name} |
|
717 | % {b'section': cls._section, b'name': a.name} | |
718 | ) |
|
718 | ) | |
719 | # get cacheable replacement tree by expanding aliases recursively |
|
719 | # get cacheable replacement tree by expanding aliases recursively |
@@ -51,7 +51,7 b' stringio = util.stringio' | |||||
51 | gitre = re.compile(br'diff --git a/(.*) b/(.*)') |
|
51 | gitre = re.compile(br'diff --git a/(.*) b/(.*)') | |
52 | tabsplitter = re.compile(br'(\t+|[^\t]+)') |
|
52 | tabsplitter = re.compile(br'(\t+|[^\t]+)') | |
53 | wordsplitter = re.compile( |
|
53 | wordsplitter = re.compile( | |
54 | br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+| |
54 | br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])' | |
55 | ) |
|
55 | ) | |
56 |
|
56 | |||
57 | PatchError = error.PatchError |
|
57 | PatchError = error.PatchError | |
@@ -805,7 +805,7 b' class patchfile(object):' | |||||
805 | if self.exists and self.create: |
|
805 | if self.exists and self.create: | |
806 | if self.copysource: |
|
806 | if self.copysource: | |
807 | self.ui.warn( |
|
807 | self.ui.warn( | |
808 | _(b"cannot create %s: destination already |
808 | _(b"cannot create %s: destination already exists\n") | |
809 | % self.fname |
|
809 | % self.fname | |
810 | ) |
|
810 | ) | |
811 | else: |
|
811 | else: | |
@@ -3191,7 +3191,7 b' def diffstat(lines, width=80):' | |||||
3191 |
|
3191 | |||
3192 | if stats: |
|
3192 | if stats: | |
3193 | output.append( |
|
3193 | output.append( | |
3194 | _(b' %d files changed, %d insertions(+), |
3194 | _(b' %d files changed, %d insertions(+), %d deletions(-)\n') | |
3195 | % (len(stats), totaladds, totalremoves) |
|
3195 | % (len(stats), totaladds, totalremoves) | |
3196 | ) |
|
3196 | ) | |
3197 |
|
3197 |
@@ -43,9 +43,7 b' def lsprofile(ui, fp):' | |||||
43 | climit = ui.configint(b'profiling', b'nested') |
|
43 | climit = ui.configint(b'profiling', b'nested') | |
44 |
|
44 | |||
45 | if format not in [b'text', b'kcachegrind']: |
|
45 | if format not in [b'text', b'kcachegrind']: | |
46 | ui.warn( |
46 | ui.warn(_(b"unrecognized profiling format '%s' - Ignored\n") % format) | |
47 | _(b"unrecognized profiling format '%s'" b" - Ignored\n") % format |
48 | ) |
49 | format = b'text' |
|
47 | format = b'text' | |
50 |
|
48 | |||
51 | try: |
|
49 | try: |
@@ -361,7 +361,7 b' else:' | |||||
361 | setattr = setattr |
|
361 | setattr = setattr | |
362 |
|
362 | |||
363 | # this can't be parsed on Python 3 |
|
363 | # this can't be parsed on Python 3 | |
364 | exec(b'def raisewithtb(exc, tb):\n |
364 | exec(b'def raisewithtb(exc, tb):\n raise exc, None, tb\n') | |
365 |
|
365 | |||
366 | def fsencode(filename): |
|
366 | def fsencode(filename): | |
367 | """ |
|
367 | """ |
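
The pycompat hunk above defines raisewithtb() through exec() because the Python 2 three-argument raise statement is a syntax error on Python 3. On Python 3 the same behaviour needs no exec at all:

    def raisewithtb(exc, tb):
        """Raise exc with an existing traceback attached (Python 3 spelling)."""
        raise exc.with_traceback(tb)
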
@@ -359,7 +359,7 b' def safestriproots(ui, repo, nodes):' | |||||
359 | if notstrip: |
|
359 | if notstrip: | |
360 | nodestr = b', '.join(sorted(short(repo[n].node()) for n in notstrip)) |
|
360 | nodestr = b', '.join(sorted(short(repo[n].node()) for n in notstrip)) | |
361 | ui.warn( |
|
361 | ui.warn( | |
362 |
_(b'warning: orphaned descendants detected, |
|
362 | _(b'warning: orphaned descendants detected, not stripping %s\n') | |
363 | % nodestr |
|
363 | % nodestr | |
364 | ) |
|
364 | ) | |
365 | return [c.node() for c in repo.set(b'roots(%ld)', tostrip)] |
|
365 | return [c.node() for c in repo.set(b'roots(%ld)', tostrip)] |
@@ -70,7 +70,7 b' class repoloader(object):' | |||||
70 | """ |
|
70 | """ | |
71 | if self._thread and self._thread.is_alive(): |
|
71 | if self._thread and self._thread.is_alive(): | |
72 | raise error.ProgrammingError( |
|
72 | raise error.ProgrammingError( | |
73 |
b'cannot obtain cached repo while |
|
73 | b'cannot obtain cached repo while loader is active' | |
74 | ) |
|
74 | ) | |
75 | return self._cache.peek(path, None) |
|
75 | return self._cache.peek(path, None) | |
76 |
|
76 |
@@ -244,7 +244,7 b' class revlogoldio(object):' | |||||
244 | def packentry(self, entry, node, version, rev): |
|
244 | def packentry(self, entry, node, version, rev): | |
245 | if gettype(entry[0]): |
|
245 | if gettype(entry[0]): | |
246 | raise error.RevlogError( |
|
246 | raise error.RevlogError( | |
247 |
_(b'index entry flags need revlog |
|
247 | _(b'index entry flags need revlog version 1') | |
248 | ) |
|
248 | ) | |
249 | e2 = ( |
|
249 | e2 = ( | |
250 | getoffset(entry[0]), |
|
250 | getoffset(entry[0]), | |
@@ -451,12 +451,12 b' class revlog(object):' | |||||
451 |
|
451 | |||
452 | if self._chunkcachesize <= 0: |
|
452 | if self._chunkcachesize <= 0: | |
453 | raise error.RevlogError( |
|
453 | raise error.RevlogError( | |
454 |
_(b'revlog chunk cache size %r is not |
|
454 | _(b'revlog chunk cache size %r is not greater than 0') | |
455 | % self._chunkcachesize |
|
455 | % self._chunkcachesize | |
456 | ) |
|
456 | ) | |
457 | elif self._chunkcachesize & (self._chunkcachesize - 1): |
|
457 | elif self._chunkcachesize & (self._chunkcachesize - 1): | |
458 | raise error.RevlogError( |
|
458 | raise error.RevlogError( | |
459 |
_(b'revlog chunk cache size %r is not a |
|
459 | _(b'revlog chunk cache size %r is not a power of 2') | |
460 | % self._chunkcachesize |
|
460 | % self._chunkcachesize | |
461 | ) |
|
461 | ) | |
462 |
|
462 | |||
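
The chunk cache size checks above rely on the usual bit trick: for a positive integer n, n & (n - 1) is zero exactly when n is a power of two. A standalone sketch of the same validation (the function name is made up):

    def validatechunkcachesize(size):
        # size must be positive and a power of two
        if size <= 0:
            raise ValueError('chunk cache size %r is not greater than 0' % size)
        if size & (size - 1):
            raise ValueError('chunk cache size %r is not a power of 2' % size)

    validatechunkcachesize(65536)    # ok
    # validatechunkcachesize(1000)   # raises: not a power of 2
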
@@ -492,7 +492,7 b' class revlog(object):' | |||||
492 | if fmt == REVLOGV0: |
|
492 | if fmt == REVLOGV0: | |
493 | if flags: |
|
493 | if flags: | |
494 | raise error.RevlogError( |
|
494 | raise error.RevlogError( | |
495 |
_(b'unknown flags (%#04x) in version %d |
|
495 | _(b'unknown flags (%#04x) in version %d revlog %s') | |
496 | % (flags >> 16, fmt, self.indexfile) |
|
496 | % (flags >> 16, fmt, self.indexfile) | |
497 | ) |
|
497 | ) | |
498 |
|
498 | |||
@@ -502,7 +502,7 b' class revlog(object):' | |||||
502 | elif fmt == REVLOGV1: |
|
502 | elif fmt == REVLOGV1: | |
503 | if flags & ~REVLOGV1_FLAGS: |
|
503 | if flags & ~REVLOGV1_FLAGS: | |
504 | raise error.RevlogError( |
|
504 | raise error.RevlogError( | |
505 |
_(b'unknown flags (%#04x) in version %d |
|
505 | _(b'unknown flags (%#04x) in version %d revlog %s') | |
506 | % (flags >> 16, fmt, self.indexfile) |
|
506 | % (flags >> 16, fmt, self.indexfile) | |
507 | ) |
|
507 | ) | |
508 |
|
508 | |||
@@ -512,7 +512,7 b' class revlog(object):' | |||||
512 | elif fmt == REVLOGV2: |
|
512 | elif fmt == REVLOGV2: | |
513 | if flags & ~REVLOGV2_FLAGS: |
|
513 | if flags & ~REVLOGV2_FLAGS: | |
514 | raise error.RevlogError( |
|
514 | raise error.RevlogError( | |
515 |
_(b'unknown flags (%#04x) in version %d |
|
515 | _(b'unknown flags (%#04x) in version %d revlog %s') | |
516 | % (flags >> 16, fmt, self.indexfile) |
|
516 | % (flags >> 16, fmt, self.indexfile) | |
517 | ) |
|
517 | ) | |
518 |
|
518 | |||
@@ -2707,7 +2707,7 b' class revlog(object):' | |||||
2707 |
|
2707 | |||
2708 | if len(tombstone) > self.rawsize(censorrev): |
|
2708 | if len(tombstone) > self.rawsize(censorrev): | |
2709 | raise error.Abort( |
|
2709 | raise error.Abort( | |
2710 |
_(b'censor tombstone must be no longer than |
|
2710 | _(b'censor tombstone must be no longer than censored data') | |
2711 | ) |
|
2711 | ) | |
2712 |
|
2712 | |||
2713 | # Rewriting the revlog in place is hard. Our strategy for censoring is |
|
2713 | # Rewriting the revlog in place is hard. Our strategy for censoring is |
@@ -980,7 +980,7 b' def expectsize(repo, subset, x, order):' | |||||
980 | raise error.ParseError(_(b'invalid set of arguments')) |
|
980 | raise error.ParseError(_(b'invalid set of arguments')) | |
981 | minsize, maxsize = getintrange( |
|
981 | minsize, maxsize = getintrange( | |
982 | args[b'size'], |
|
982 | args[b'size'], | |
983 |
_(b'expectsize requires a size range |
|
983 | _(b'expectsize requires a size range or a positive integer'), | |
984 | _(b'size range bounds must be integers'), |
|
984 | _(b'size range bounds must be integers'), | |
985 | minsize, |
|
985 | minsize, | |
986 | maxsize, |
|
986 | maxsize, | |
@@ -989,11 +989,13 b' def expectsize(repo, subset, x, order):' | |||||
989 | raise error.ParseError(_(b'negative size')) |
|
989 | raise error.ParseError(_(b'negative size')) | |
990 | rev = getset(repo, fullreposet(repo), args[b'set'], order=order) |
|
990 | rev = getset(repo, fullreposet(repo), args[b'set'], order=order) | |
991 | if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize): |
|
991 | if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize): | |
992 | err = _( |
|
992 | err = _(b'revset size mismatch. expected between %d and %d, got %d') % ( | |
993 | b'revset size mismatch.' b' expected between %d and %d, got %d' |
|
993 | minsize, | |
994 |
|
|
994 | maxsize, | |
|
995 | len(rev), | |||
|
996 | ) | |||
995 | elif minsize == maxsize and len(rev) != minsize: |
|
997 | elif minsize == maxsize and len(rev) != minsize: | |
996 |
err = _(b'revset size mismatch. |
|
998 | err = _(b'revset size mismatch. expected %d, got %d') % ( | |
997 | minsize, |
|
999 | minsize, | |
998 | len(rev), |
|
1000 | len(rev), | |
999 | ) |
|
1001 | ) | |
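
Per the hunk above, expectsize() accepts either an exact size or an inclusive range and complains when the evaluated revset does not fit. The check itself reduces to a few comparisons; a standalone sketch (checksize is a hypothetical name):

    def checksize(n, minsize, maxsize):
        if minsize == maxsize:
            if n != minsize:
                raise ValueError(
                    'revset size mismatch. expected %d, got %d' % (minsize, n))
        elif n < minsize or n > maxsize:
            raise ValueError(
                'revset size mismatch. expected between %d and %d, got %d'
                % (minsize, maxsize, n))

    checksize(3, 1, 5)    # ok
    # checksize(7, 1, 5)  # raises
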
@@ -1043,14 +1045,14 b' def extra(repo, subset, x):' | |||||
1043 | raise error.ParseError(_(b'extra takes at least 1 argument')) |
|
1045 | raise error.ParseError(_(b'extra takes at least 1 argument')) | |
1044 | # i18n: "extra" is a keyword |
|
1046 | # i18n: "extra" is a keyword | |
1045 | label = getstring( |
|
1047 | label = getstring( | |
1046 |
args[b'label'], _(b'first argument to extra must be |
|
1048 | args[b'label'], _(b'first argument to extra must be a string') | |
1047 | ) |
|
1049 | ) | |
1048 | value = None |
|
1050 | value = None | |
1049 |
|
1051 | |||
1050 | if b'value' in args: |
|
1052 | if b'value' in args: | |
1051 | # i18n: "extra" is a keyword |
|
1053 | # i18n: "extra" is a keyword | |
1052 | value = getstring( |
|
1054 | value = getstring( | |
1053 |
args[b'value'], _(b'second argument to extra must be |
|
1055 | args[b'value'], _(b'second argument to extra must be a string') | |
1054 | ) |
|
1056 | ) | |
1055 | kind, value, matcher = stringutil.stringmatcher(value) |
|
1057 | kind, value, matcher = stringutil.stringmatcher(value) | |
1056 |
|
1058 | |||
@@ -1314,7 +1316,7 b' def _matchfiles(repo, subset, x):' | |||||
1314 | elif prefix == b'r:': |
|
1316 | elif prefix == b'r:': | |
1315 | if rev is not None: |
|
1317 | if rev is not None: | |
1316 | raise error.ParseError( |
|
1318 | raise error.ParseError( | |
1317 |
b'_matchfiles expected at most one |
|
1319 | b'_matchfiles expected at most one revision' | |
1318 | ) |
|
1320 | ) | |
1319 | if value == b'': # empty means working directory |
|
1321 | if value == b'': # empty means working directory | |
1320 | rev = node.wdirrev |
|
1322 | rev = node.wdirrev | |
@@ -1323,7 +1325,7 b' def _matchfiles(repo, subset, x):' | |||||
1323 | elif prefix == b'd:': |
|
1325 | elif prefix == b'd:': | |
1324 | if default is not None: |
|
1326 | if default is not None: | |
1325 | raise error.ParseError( |
|
1327 | raise error.ParseError( | |
1326 |
b'_matchfiles expected at most one |
|
1328 | b'_matchfiles expected at most one default mode' | |
1327 | ) |
|
1329 | ) | |
1328 | default = value |
|
1330 | default = value | |
1329 | else: |
|
1331 | else: | |
@@ -2127,7 +2129,7 b' def matching(repo, subset, x):' | |||||
2127 | fieldlist = getstring( |
|
2129 | fieldlist = getstring( | |
2128 | l[1], |
|
2130 | l[1], | |
2129 | # i18n: "matching" is a keyword |
|
2131 | # i18n: "matching" is a keyword | |
2130 |
_(b"matching requires a string |
|
2132 | _(b"matching requires a string as its second argument"), | |
2131 | ).split() |
|
2133 | ).split() | |
2132 |
|
2134 | |||
2133 | # Make sure that there are no repeated fields, |
|
2135 | # Make sure that there are no repeated fields, | |
@@ -2284,7 +2286,7 b' def _getsortargs(x):' | |||||
2284 | if len(keyflags) > 1 and any(k == b'topo' for k, reverse in keyflags): |
|
2286 | if len(keyflags) > 1 and any(k == b'topo' for k, reverse in keyflags): | |
2285 | # i18n: "topo" is a keyword |
|
2287 | # i18n: "topo" is a keyword | |
2286 | raise error.ParseError( |
|
2288 | raise error.ParseError( | |
2287 |
_(b'topo sort order cannot be combined |
|
2289 | _(b'topo sort order cannot be combined with other sort keys') | |
2288 | ) |
|
2290 | ) | |
2289 |
|
2291 | |||
2290 | opts = {} |
|
2292 | opts = {} |
@@ -456,7 +456,7 b' def _nothingtoshelvemessaging(ui, repo, ' | |||||
456 | stat = repo.status(match=scmutil.match(repo[None], pats, opts)) |
|
456 | stat = repo.status(match=scmutil.match(repo[None], pats, opts)) | |
457 | if stat.deleted: |
|
457 | if stat.deleted: | |
458 | ui.status( |
|
458 | ui.status( | |
459 |
_(b"nothing changed (%d missing files, see |
|
459 | _(b"nothing changed (%d missing files, see 'hg status')\n") | |
460 | % len(stat.deleted) |
|
460 | % len(stat.deleted) | |
461 | ) |
|
461 | ) | |
462 | else: |
|
462 | else: | |
@@ -707,7 +707,7 b' def checkparents(repo, state):' | |||||
707 | """check parent while resuming an unshelve""" |
|
707 | """check parent while resuming an unshelve""" | |
708 | if state.parents != repo.dirstate.parents(): |
|
708 | if state.parents != repo.dirstate.parents(): | |
709 | raise error.Abort( |
|
709 | raise error.Abort( | |
710 |
_(b'working directory parents do not match unshelve |
|
710 | _(b'working directory parents do not match unshelve state') | |
711 | ) |
|
711 | ) | |
712 |
|
712 | |||
713 |
|
713 |
@@ -121,7 +121,7 b' def patternsforrev(repo, rev):' | |||||
121 |
|
121 | |||
122 | if rev is None: |
|
122 | if rev is None: | |
123 | raise error.Abort( |
|
123 | raise error.Abort( | |
124 |
_(b'cannot parse sparse patterns from working |
|
124 | _(b'cannot parse sparse patterns from working directory') | |
125 | ) |
|
125 | ) | |
126 |
|
126 | |||
127 | includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse') |
|
127 | includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse') | |
@@ -483,7 +483,7 b' def refreshwdir(repo, origstatus, origsp' | |||||
483 |
|
483 | |||
484 | if abort: |
|
484 | if abort: | |
485 | raise error.Abort( |
|
485 | raise error.Abort( | |
486 |
_(b'could not update sparseness due to pending |
|
486 | _(b'could not update sparseness due to pending changes') | |
487 | ) |
|
487 | ) | |
488 |
|
488 | |||
489 | # Calculate actions |
|
489 | # Calculate actions |
@@ -210,7 +210,7 b' def _hostsettings(ui, hostname):' | |||||
210 | if not (fingerprint.startswith((b'sha1:', b'sha256:', b'sha512:'))): |
|
210 | if not (fingerprint.startswith((b'sha1:', b'sha256:', b'sha512:'))): | |
211 | raise error.Abort( |
|
211 | raise error.Abort( | |
212 | _(b'invalid fingerprint for %s: %s') % (bhostname, fingerprint), |
|
212 | _(b'invalid fingerprint for %s: %s') % (bhostname, fingerprint), | |
213 |
hint=_(b'must begin with "sha1:", "sha256:", |
|
213 | hint=_(b'must begin with "sha1:", "sha256:", or "sha512:"'), | |
214 | ) |
|
214 | ) | |
215 |
|
215 | |||
216 | alg, fingerprint = fingerprint.split(b':', 1) |
|
216 | alg, fingerprint = fingerprint.split(b':', 1) | |
@@ -328,7 +328,7 b' def protocolsettings(protocol):' | |||||
328 | if supportedprotocols == {b'tls1.0'}: |
|
328 | if supportedprotocols == {b'tls1.0'}: | |
329 | if protocol != b'tls1.0': |
|
329 | if protocol != b'tls1.0': | |
330 | raise error.Abort( |
|
330 | raise error.Abort( | |
331 |
_(b'current Python does not support protocol |
|
331 | _(b'current Python does not support protocol setting %s') | |
332 | % protocol, |
|
332 | % protocol, | |
333 | hint=_( |
|
333 | hint=_( | |
334 | b'upgrade Python or disable setting since ' |
|
334 | b'upgrade Python or disable setting since ' | |
@@ -616,7 +616,7 b' def wrapserversocket(' | |||||
616 | for f in (certfile, keyfile, cafile): |
|
616 | for f in (certfile, keyfile, cafile): | |
617 | if f and not os.path.exists(f): |
|
617 | if f and not os.path.exists(f): | |
618 | raise error.Abort( |
|
618 | raise error.Abort( | |
619 |
_(b'referenced certificate file (%s) does not |
|
619 | _(b'referenced certificate file (%s) does not exist') % f | |
620 | ) |
|
620 | ) | |
621 |
|
621 | |||
622 | protocol, options, _protocolui = protocolsettings(b'tls1.0') |
|
622 | protocol, options, _protocolui = protocolsettings(b'tls1.0') | |
@@ -928,7 +928,7 b' def validatesocket(sock):' | |||||
928 |
|
928 | |||
929 | if not peercert: |
|
929 | if not peercert: | |
930 | raise error.Abort( |
|
930 | raise error.Abort( | |
931 |
_(b'%s certificate error: |
|
931 | _(b'%s certificate error: no certificate received') % host | |
932 | ) |
|
932 | ) | |
933 |
|
933 | |||
934 | if settings[b'disablecertverification']: |
|
934 | if settings[b'disablecertverification']: | |
@@ -990,7 +990,7 b' def validatesocket(sock):' | |||||
990 | section = b'hostsecurity' |
|
990 | section = b'hostsecurity' | |
991 | nice = b'%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash])) |
|
991 | nice = b'%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash])) | |
992 | raise error.Abort( |
|
992 | raise error.Abort( | |
993 |
_(b'certificate for %s has unexpected |
|
993 | _(b'certificate for %s has unexpected fingerprint %s') | |
994 | % (host, nice), |
|
994 | % (host, nice), | |
995 | hint=_(b'check %s configuration') % section, |
|
995 | hint=_(b'check %s configuration') % section, | |
996 | ) |
|
996 | ) |
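
The sslutil hunks above pin certificates by fingerprints that must begin with "sha1:", "sha256:", or "sha512:" and are then compared against the peer certificate. As a generic illustration in that spirit (not Mercurial's code, and not necessarily its exact fingerprint formatting), a pin can be computed from the DER-encoded peer certificate with the standard library:

    import hashlib
    import socket
    import ssl

    def fingerprint(host, port=443):
        # Fetch the peer certificate in DER form and hash it; the result can
        # then be compared against a configured 'sha256:<hex>' pin.
        ctx = ssl.create_default_context()
        with socket.create_connection((host, port)) as sock:
            with ctx.wrap_socket(sock, server_hostname=host) as ssock:
                der = ssock.getpeercert(binary_form=True)
        return 'sha256:' + hashlib.sha256(der).hexdigest()
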
@@ -60,7 +60,7 b' class cmdstate(object):' | |||||
60 | """ |
|
60 | """ | |
61 | if not isinstance(version, int): |
|
61 | if not isinstance(version, int): | |
62 | raise error.ProgrammingError( |
|
62 | raise error.ProgrammingError( | |
63 |
b"version of state file should be |
|
63 | b"version of state file should be an integer" | |
64 | ) |
|
64 | ) | |
65 |
|
65 | |||
66 | with self._repo.vfs(self.fname, b'wb', atomictemp=True) as fp: |
|
66 | with self._repo.vfs(self.fname, b'wb', atomictemp=True) as fp: | |
@@ -76,7 +76,7 b' class cmdstate(object):' | |||||
76 | int(fp.readline()) |
|
76 | int(fp.readline()) | |
77 | except ValueError: |
|
77 | except ValueError: | |
78 | raise error.CorruptedState( |
|
78 | raise error.CorruptedState( | |
79 |
b"unknown version of state file |
|
79 | b"unknown version of state file found" | |
80 | ) |
|
80 | ) | |
81 |
|
81 | |||
82 | return cborutil.decodeall(fp.read())[0] |
|
82 | return cborutil.decodeall(fp.read())[0] |
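
The cmdstate hunks above describe a small state-file layout: an integer version on the first line, followed by a CBOR payload read back with cborutil.decodeall(). A rough sketch of the same layout, using JSON instead of CBOR so it runs with the standard library alone (savestate/readstate are hypothetical names):

    import json

    def savestate(path, version, data):
        if not isinstance(version, int):
            raise TypeError("version of state file should be an integer")
        with open(path, 'wb') as fp:
            fp.write(b'%d\n' % version)
            fp.write(json.dumps(data).encode('utf-8'))

    def readstate(path):
        with open(path, 'rb') as fp:
            try:
                int(fp.readline())
            except ValueError:
                raise ValueError("unknown version of state file found")
            return json.loads(fp.read().decode('utf-8'))
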
@@ -468,14 +468,14 b' def applybundlev1(repo, fp):' | |||||
468 | """ |
|
468 | """ | |
469 | if len(repo): |
|
469 | if len(repo): | |
470 | raise error.Abort( |
|
470 | raise error.Abort( | |
471 |
_(b'cannot apply stream clone bundle on non-empty |
|
471 | _(b'cannot apply stream clone bundle on non-empty repo') | |
472 | ) |
|
472 | ) | |
473 |
|
473 | |||
474 | filecount, bytecount, requirements = readbundle1header(fp) |
|
474 | filecount, bytecount, requirements = readbundle1header(fp) | |
475 | missingreqs = requirements - repo.supportedformats |
|
475 | missingreqs = requirements - repo.supportedformats | |
476 | if missingreqs: |
|
476 | if missingreqs: | |
477 | raise error.Abort( |
|
477 | raise error.Abort( | |
478 |
_(b'unable to apply stream clone: |
|
478 | _(b'unable to apply stream clone: unsupported format: %s') | |
479 | % b', '.join(sorted(missingreqs)) |
|
479 | % b', '.join(sorted(missingreqs)) | |
480 | ) |
|
480 | ) | |
481 |
|
481 | |||
@@ -715,7 +715,7 b' def applybundlev2(repo, fp, filecount, f' | |||||
715 | missingreqs = [r for r in requirements if r not in repo.supported] |
|
715 | missingreqs = [r for r in requirements if r not in repo.supported] | |
716 | if missingreqs: |
|
716 | if missingreqs: | |
717 | raise error.Abort( |
|
717 | raise error.Abort( | |
718 |
_(b'unable to apply stream clone: |
|
718 | _(b'unable to apply stream clone: unsupported format: %s') | |
719 | % b', '.join(sorted(missingreqs)) |
|
719 | % b', '.join(sorted(missingreqs)) | |
720 | ) |
|
720 | ) | |
721 |
|
721 |
@@ -1232,7 +1232,7 b' class svnsubrepo(abstractsubrepo):' | |||||
1232 | def remove(self): |
|
1232 | def remove(self): | |
1233 | if self.dirty(): |
|
1233 | if self.dirty(): | |
1234 | self.ui.warn( |
|
1234 | self.ui.warn( | |
1235 |
_(b'not removing repo %s because |
|
1235 | _(b'not removing repo %s because it has changes.\n') | |
1236 | % self._path |
|
1236 | % self._path | |
1237 | ) |
|
1237 | ) | |
1238 | return |
|
1238 | return | |
@@ -1572,7 +1572,7 b' class gitsubrepo(abstractsubrepo):' | |||||
1572 | self._gitcommand([b'fetch']) |
|
1572 | self._gitcommand([b'fetch']) | |
1573 | if not self._githavelocally(revision): |
|
1573 | if not self._githavelocally(revision): | |
1574 | raise error.Abort( |
|
1574 | raise error.Abort( | |
1575 |
_(b'revision %s does not exist in subrepository |
|
1575 | _(b'revision %s does not exist in subrepository "%s"\n') | |
1576 | % (revision, self._relpath) |
|
1576 | % (revision, self._relpath) | |
1577 | ) |
|
1577 | ) | |
1578 |
|
1578 | |||
@@ -1630,11 +1630,11 b' class gitsubrepo(abstractsubrepo):' | |||||
1630 | def rawcheckout(): |
|
1630 | def rawcheckout(): | |
1631 | # no branch to checkout, check it out with no branch |
|
1631 | # no branch to checkout, check it out with no branch | |
1632 | self.ui.warn( |
|
1632 | self.ui.warn( | |
1633 |
_(b'checking out detached HEAD in |
|
1633 | _(b'checking out detached HEAD in subrepository "%s"\n') | |
1634 | % self._relpath |
|
1634 | % self._relpath | |
1635 | ) |
|
1635 | ) | |
1636 | self.ui.warn( |
|
1636 | self.ui.warn( | |
1637 |
_(b'check out a git branch if you intend |
|
1637 | _(b'check out a git branch if you intend to make changes\n') | |
1638 | ) |
|
1638 | ) | |
1639 | checkout([b'-q', revision]) |
|
1639 | checkout([b'-q', revision]) | |
1640 |
|
1640 | |||
@@ -1822,7 +1822,7 b' class gitsubrepo(abstractsubrepo):' | |||||
1822 | return |
|
1822 | return | |
1823 | if self.dirty(): |
|
1823 | if self.dirty(): | |
1824 | self.ui.warn( |
|
1824 | self.ui.warn( | |
1825 |
_(b'not removing repo %s because |
|
1825 | _(b'not removing repo %s because it has changes.\n') | |
1826 | % self._relpath |
|
1826 | % self._relpath | |
1827 | ) |
|
1827 | ) | |
1828 | return |
|
1828 | return |
@@ -593,7 +593,7 b' def _tag(' | |||||
593 | repo.hook(b'pretag', throw=True, node=hex(node), tag=name, local=local) |
|
593 | repo.hook(b'pretag', throw=True, node=hex(node), tag=name, local=local) | |
594 | if name in branches: |
|
594 | if name in branches: | |
595 | repo.ui.warn( |
|
595 | repo.ui.warn( | |
596 |
_(b"warning: tag %s conflicts with existing |
|
596 | _(b"warning: tag %s conflicts with existing branch name\n") | |
597 | % name |
|
597 | % name | |
598 | ) |
|
598 | ) | |
599 |
|
599 |
@@ -409,7 +409,7 b' class compressionengine(formatvariant):' | |||||
409 | ) |
|
409 | ) | |
410 |
|
410 | |||
411 | upgrademessage = _( |
|
411 | upgrademessage = _( | |
412 |
b'revlog content will be recompressed with the new |
|
412 | b'revlog content will be recompressed with the new algorithm.' | |
413 | ) |
|
413 | ) | |
414 |
|
414 | |||
415 | @classmethod |
|
415 | @classmethod | |
@@ -1106,7 +1106,7 b' def upgraderepo(' | |||||
1106 | missingreqs = requiredsourcerequirements(repo) - repo.requirements |
|
1106 | missingreqs = requiredsourcerequirements(repo) - repo.requirements | |
1107 | if missingreqs: |
|
1107 | if missingreqs: | |
1108 | raise error.Abort( |
|
1108 | raise error.Abort( | |
1109 |
_(b'cannot upgrade repository; requirement |
|
1109 | _(b'cannot upgrade repository; requirement missing: %s') | |
1110 | % _(b', ').join(sorted(missingreqs)) |
|
1110 | % _(b', ').join(sorted(missingreqs)) | |
1111 | ) |
|
1111 | ) | |
1112 |
|
1112 | |||
@@ -1173,7 +1173,7 b' def upgraderepo(' | |||||
1173 | raise error.Abort( |
|
1173 | raise error.Abort( | |
1174 | _(b'unknown optimization action requested: %s') |
|
1174 | _(b'unknown optimization action requested: %s') | |
1175 | % b', '.join(sorted(optimize)), |
|
1175 | % b', '.join(sorted(optimize)), | |
1176 |
hint=_(b'run without arguments to see valid |
|
1176 | hint=_(b'run without arguments to see valid optimizations'), | |
1177 | ) |
|
1177 | ) | |
1178 |
|
1178 | |||
1179 | deficiencies = finddeficiencies(repo) |
|
1179 | deficiencies = finddeficiencies(repo) |
@@ -2034,12 +2034,12 b' def checkwinfilename(path):' | |||||
2034 | ) |
|
2034 | ) | |
2035 | if ord(c) <= 31: |
|
2035 | if ord(c) <= 31: | |
2036 | return _( |
|
2036 | return _( | |
2037 |
b"filename contains '%s', which is invalid |
|
2037 | b"filename contains '%s', which is invalid on Windows" | |
2038 | ) % stringutil.escapestr(c) |
|
2038 | ) % stringutil.escapestr(c) | |
2039 | base = n.split(b'.')[0] |
|
2039 | base = n.split(b'.')[0] | |
2040 | if base and base.lower() in _winreservednames: |
|
2040 | if base and base.lower() in _winreservednames: | |
2041 | return ( |
|
2041 | return ( | |
2042 |
_(b"filename contains '%s', which is reserved |
|
2042 | _(b"filename contains '%s', which is reserved on Windows") | |
2043 | % base |
|
2043 | % base | |
2044 | ) |
|
2044 | ) | |
2045 | t = n[-1:] |
|
2045 | t = n[-1:] | |
@@ -3506,7 +3506,7 b' class dirs(object):' | |||||
3506 | addpath(f) |
|
3506 | addpath(f) | |
3507 | elif skip is not None: |
|
3507 | elif skip is not None: | |
3508 | raise error.ProgrammingError( |
|
3508 | raise error.ProgrammingError( | |
3509 |
b"skip character is only supported |
|
3509 | b"skip character is only supported with a dict source" | |
3510 | ) |
|
3510 | ) | |
3511 | else: |
|
3511 | else: | |
3512 | for f in map: |
|
3512 | for f in map: | |
@@ -3583,7 +3583,7 b' def readexactly(stream, n):' | |||||
3583 | s = stream.read(n) |
|
3583 | s = stream.read(n) | |
3584 | if len(s) < n: |
|
3584 | if len(s) < n: | |
3585 | raise error.Abort( |
|
3585 | raise error.Abort( | |
3586 |
_(b"stream ended unexpectedly |
|
3586 | _(b"stream ended unexpectedly (got %d bytes, expected %d)") | |
3587 | % (len(s), n) |
|
3587 | % (len(s), n) | |
3588 | ) |
|
3588 | ) | |
3589 | return s |
|
3589 | return s |
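
readexactly() in the util.py hunk above fails loudly when a stream runs dry before the requested byte count is reached. A self-contained sketch of the same guard:

    import io

    def readexactly(stream, n):
        s = stream.read(n)
        if len(s) < n:
            raise EOFError(
                'stream ended unexpectedly (got %d bytes, expected %d)'
                % (len(s), n))
        return s

    readexactly(io.BytesIO(b'abcdef'), 4)    # b'abcd'
    # readexactly(io.BytesIO(b'ab'), 4)      # raises EOFError
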
@@ -404,7 +404,7 b' def decodeitem(b, offset=0):' | |||||
404 |
|
404 | |||
405 | if special != SPECIAL_START_ARRAY: |
|
405 | if special != SPECIAL_START_ARRAY: | |
406 | raise CBORDecodeError( |
|
406 | raise CBORDecodeError( | |
407 |
b'expected array after finite set |
|
407 | b'expected array after finite set semantic tag' | |
408 | ) |
|
408 | ) | |
409 |
|
409 | |||
410 | return True, size, readcount + readcount2 + 1, SPECIAL_START_SET |
|
410 | return True, size, readcount + readcount2 + 1, SPECIAL_START_SET | |
@@ -746,7 +746,7 b' class sansiodecoder(object):' | |||||
746 | SPECIAL_START_SET, |
|
746 | SPECIAL_START_SET, | |
747 | ): |
|
747 | ): | |
748 | raise CBORDecodeError( |
|
748 | raise CBORDecodeError( | |
749 |
b'collections not supported as map |
|
749 | b'collections not supported as map keys' | |
750 | ) |
|
750 | ) | |
751 |
|
751 | |||
752 | # We do not allow special values to be used as map keys. |
|
752 | # We do not allow special values to be used as map keys. | |
@@ -841,7 +841,7 b' class sansiodecoder(object):' | |||||
841 | SPECIAL_START_SET, |
|
841 | SPECIAL_START_SET, | |
842 | ): |
|
842 | ): | |
843 | raise CBORDecodeError( |
|
843 | raise CBORDecodeError( | |
844 |
b'collections not allowed as set |
|
844 | b'collections not allowed as set values' | |
845 | ) |
|
845 | ) | |
846 |
|
846 | |||
847 | # We don't allow non-trivial types to exist as set values. |
|
847 | # We don't allow non-trivial types to exist as set values. |
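
The cborutil hunks above refuse containers as map keys and set values, likely because of the Python data model the decoder maps onto: decoded arrays and maps become lists and dicts, which are unhashable and so could not serve as dict keys or set members anyway. A quick illustration:

    try:
        {[1, 2]: 'x'}
    except TypeError as e:
        print(e)            # unhashable type: 'list'
    print({(1, 2): 'x'})    # an immutable tuple works fine as a key
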
@@ -685,7 +685,7 b' class backgroundfilecloser(object):' | |||||
685 | """Schedule a file for closing.""" |
|
685 | """Schedule a file for closing.""" | |
686 | if not self._entered: |
|
686 | if not self._entered: | |
687 | raise error.Abort( |
|
687 | raise error.Abort( | |
688 |
_(b'can only call close() when context manager |
|
688 | _(b'can only call close() when context manager active') | |
689 | ) |
|
689 | ) | |
690 |
|
690 | |||
691 | # If a background thread encountered an exception, raise now so we fail |
|
691 | # If a background thread encountered an exception, raise now so we fail |
@@ -711,7 +711,7 b' class identitydecoder(object):' | |||||
711 | def __init__(self, ui, extraobjs): |
|
711 | def __init__(self, ui, extraobjs): | |
712 | if extraobjs: |
|
712 | if extraobjs: | |
713 | raise error.Abort( |
|
713 | raise error.Abort( | |
714 |
_(b'identity decoder received unexpected |
|
714 | _(b'identity decoder received unexpected additional values') | |
715 | ) |
|
715 | ) | |
716 |
|
716 | |||
717 | def decode(self, data): |
|
717 | def decode(self, data): | |
@@ -745,7 +745,7 b' class zlibdecoder(object):' | |||||
745 |
|
745 | |||
746 | if extraobjs: |
|
746 | if extraobjs: | |
747 | raise error.Abort( |
|
747 | raise error.Abort( | |
748 |
_(b'zlib decoder received unexpected |
|
748 | _(b'zlib decoder received unexpected additional values') | |
749 | ) |
|
749 | ) | |
750 |
|
750 | |||
751 | self._decompressor = zlib.decompressobj() |
|
751 | self._decompressor = zlib.decompressobj() | |
@@ -802,7 +802,7 b' class zstd8mbdecoder(zstdbasedecoder):' | |||||
802 | def __init__(self, ui, extraobjs): |
|
802 | def __init__(self, ui, extraobjs): | |
803 | if extraobjs: |
|
803 | if extraobjs: | |
804 | raise error.Abort( |
|
804 | raise error.Abort( | |
805 |
_(b'zstd8mb decoder received unexpected |
|
805 | _(b'zstd8mb decoder received unexpected additional values') | |
806 | ) |
|
806 | ) | |
807 |
|
807 | |||
808 | super(zstd8mbdecoder, self).__init__(maxwindowsize=8 * 1048576) |
|
808 | super(zstd8mbdecoder, self).__init__(maxwindowsize=8 * 1048576) | |
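
The zlib decoder in the hunk above is a thin streaming wrapper around zlib.decompressobj(). A minimal standalone version of that pattern, with a round trip to show the usage (the class name is made up):

    import zlib

    class ZlibDecoder(object):
        """Feed compressed chunks in, get decompressed bytes out (sketch)."""
        def __init__(self):
            self._decompressor = zlib.decompressobj()

        def decode(self, data):
            return self._decompressor.decompress(data)

        def flush(self):
            return self._decompressor.flush()

    c = zlib.compressobj()
    payload = c.compress(b'hello world') + c.flush()
    d = ZlibDecoder()
    print(d.decode(payload) + d.flush())    # b'hello world'
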
@@ -1116,7 +1116,7 b' class serverreactor(object):' | |||||
1116 | # TODO handle decoding frames |
|
1116 | # TODO handle decoding frames | |
1117 | self._state = b'errored' |
|
1117 | self._state = b'errored' | |
1118 | raise error.ProgrammingError( |
|
1118 | raise error.ProgrammingError( | |
1119 |
b'support for decoding stream payloads |
|
1119 | b'support for decoding stream payloads not yet implemented' | |
1120 | ) |
|
1120 | ) | |
1121 |
|
1121 | |||
1122 | if frame.streamflags & STREAM_FLAG_END_STREAM: |
|
1122 | if frame.streamflags & STREAM_FLAG_END_STREAM: | |
@@ -1361,7 +1361,7 b' class serverreactor(object):' | |||||
1361 | if not entry[b'requestdone']: |
|
1361 | if not entry[b'requestdone']: | |
1362 | self._state = b'errored' |
|
1362 | self._state = b'errored' | |
1363 | raise error.ProgrammingError( |
|
1363 | raise error.ProgrammingError( | |
1364 |
b'should not be called without |
|
1364 | b'should not be called without requestdone set' | |
1365 | ) |
|
1365 | ) | |
1366 |
|
1366 | |||
1367 | del self._receivingcommands[requestid] |
|
1367 | del self._receivingcommands[requestid] | |
@@ -1664,9 +1664,7 b' class serverreactor(object):' | |||||
1664 | return self._makeruncommandresult(frame.requestid) |
|
1664 | return self._makeruncommandresult(frame.requestid) | |
1665 | else: |
|
1665 | else: | |
1666 | self._state = b'errored' |
|
1666 | self._state = b'errored' | |
1667 | return self._makeerrorresult( |
|
1667 | return self._makeerrorresult(_(b'command data frame without flags')) | |
1668 | _(b'command data frame without ' b'flags') |
|
|||
1669 | ) |
|
|||
1670 |
|
1668 | |||
1671 | def _onframeerrored(self, frame): |
|
1669 | def _onframeerrored(self, frame): | |
1672 | return self._makeerrorresult(_(b'server already errored')) |
|
1670 | return self._makeerrorresult(_(b'server already errored')) | |
@@ -1796,7 +1794,7 b' class clientreactor(object):' | |||||
1796 | else: |
|
1794 | else: | |
1797 | if not self._cansend: |
|
1795 | if not self._cansend: | |
1798 | raise error.ProgrammingError( |
|
1796 | raise error.ProgrammingError( | |
1799 |
b'sends cannot be performed on |
|
1797 | b'sends cannot be performed on this instance' | |
1800 | ) |
|
1798 | ) | |
1801 |
|
1799 | |||
1802 | if not self._hasmultiplesend: |
|
1800 | if not self._hasmultiplesend: | |
@@ -1824,7 +1822,7 b' class clientreactor(object):' | |||||
1824 |
|
1822 | |||
1825 | if not self._cansend: |
|
1823 | if not self._cansend: | |
1826 | raise error.ProgrammingError( |
|
1824 | raise error.ProgrammingError( | |
1827 |
b'sends cannot be performed on this |
|
1825 | b'sends cannot be performed on this instance' | |
1828 | ) |
|
1826 | ) | |
1829 |
|
1827 | |||
1830 | # If the instance only allows sending once, mark that we have fired |
|
1828 | # If the instance only allows sending once, mark that we have fired |
@@ -679,7 +679,7 b' def _runsshserver(ui, repo, fin, fout, e' | |||||
679 | _sshv1respondooberror( |
|
679 | _sshv1respondooberror( | |
680 | fout, |
|
680 | fout, | |
681 | ui.ferr, |
|
681 | ui.ferr, | |
682 |
b'cannot upgrade protocols multiple |
|
682 | b'cannot upgrade protocols multiple times', | |
683 | ) |
|
683 | ) | |
684 | state = b'shutdown' |
|
684 | state = b'shutdown' | |
685 | continue |
|
685 | continue | |
@@ -787,7 +787,7 b' def _runsshserver(ui, repo, fin, fout, e' | |||||
787 | _sshv1respondooberror( |
|
787 | _sshv1respondooberror( | |
788 | fout, |
|
788 | fout, | |
789 | ui.ferr, |
|
789 | ui.ferr, | |
790 |
b'malformed handshake protocol: |
|
790 | b'malformed handshake protocol: missing %s' % line, | |
791 | ) |
|
791 | ) | |
792 | ok = False |
|
792 | ok = False | |
793 | state = b'shutdown' |
|
793 | state = b'shutdown' |
@@ -143,12 +143,12 b' class peerexecutor(object):' | |||||
143 | def callcommand(self, command, args): |
|
143 | def callcommand(self, command, args): | |
144 | if self._sent: |
|
144 | if self._sent: | |
145 | raise error.ProgrammingError( |
|
145 | raise error.ProgrammingError( | |
146 |
b'callcommand() cannot be used |
|
146 | b'callcommand() cannot be used after commands are sent' | |
147 | ) |
|
147 | ) | |
148 |
|
148 | |||
149 | if self._closed: |
|
149 | if self._closed: | |
150 | raise error.ProgrammingError( |
|
150 | raise error.ProgrammingError( | |
151 |
b'callcommand() cannot be used |
|
151 | b'callcommand() cannot be used after close()' | |
152 | ) |
|
152 | ) | |
153 |
|
153 | |||
154 | # Commands are dispatched through methods on the peer. |
|
154 | # Commands are dispatched through methods on the peer. |
@@ -41,7 +41,7 b' urlreq = util.urlreq' | |||||
41 |
|
41 | |||
42 | bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required') |
|
42 | bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required') | |
43 | bundle2requiredhint = _( |
|
43 | bundle2requiredhint = _( | |
44 |
b'see https://www.mercurial-scm.org/wiki/ |
|
44 | b'see https://www.mercurial-scm.org/wiki/IncompatibleClient' | |
45 | ) |
|
45 | ) | |
46 | bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint) |
|
46 | bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint) | |
47 |
|
47 | |||
@@ -165,13 +165,13 b' def wireprotocommand(name, args=None, pe' | |||||
165 |
|
165 | |||
166 | if not isinstance(args, bytes): |
|
166 | if not isinstance(args, bytes): | |
167 | raise error.ProgrammingError( |
|
167 | raise error.ProgrammingError( | |
168 |
b'arguments for version 1 commands |
|
168 | b'arguments for version 1 commands must be declared as bytes' | |
169 | ) |
|
169 | ) | |
170 |
|
170 | |||
171 | def register(func): |
|
171 | def register(func): | |
172 | if name in commands: |
|
172 | if name in commands: | |
173 | raise error.ProgrammingError( |
|
173 | raise error.ProgrammingError( | |
174 |
b'%s command already registered |
|
174 | b'%s command already registered for version 1' % name | |
175 | ) |
|
175 | ) | |
176 | commands[name] = wireprototypes.commandentry( |
|
176 | commands[name] = wireprototypes.commandentry( | |
177 | func, args=args, transports=transports, permission=permission |
|
177 | func, args=args, transports=transports, permission=permission |
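
The wireprotocommand hunks above register command handlers through a decorator, insist that the argument specification has the right type for the protocol version, and reject duplicate registrations. A rough standalone sketch of that registration pattern (not the actual wire protocol code; the registry and handler below are made up):

    commands = {}

    def wireprotocommand(name, args=b''):
        if not isinstance(args, bytes):
            raise TypeError(
                'arguments for version 1 commands must be declared as bytes')

        def register(func):
            if name in commands:
                raise ValueError(
                    '%s command already registered for version 1' % name)
            commands[name] = (func, args)
            return func

        return register

    @wireprotocommand(b'heads', b'')
    def heads(repo, proto):
        return b'...'
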
@@ -140,7 +140,7 b' def handlehttpv2request(rctx, req, res, ' | |||||
140 | # since client does Accept it. |
|
140 | # since client does Accept it. | |
141 | res.headers[b'Content-Type'] = b'text/plain' |
|
141 | res.headers[b'Content-Type'] = b'text/plain' | |
142 | res.setbodybytes( |
|
142 | res.setbodybytes( | |
143 |
_(b'client MUST send Content-Type header with |
|
143 | _(b'client MUST send Content-Type header with value: %s\n') | |
144 | % FRAMINGTYPE |
|
144 | % FRAMINGTYPE | |
145 | ) |
|
145 | ) | |
146 | return |
|
146 | return | |
@@ -324,7 +324,7 b' def _httpv2runcommand(' | |||||
324 | res.status = b'403 Forbidden' |
|
324 | res.status = b'403 Forbidden' | |
325 | res.headers[b'Content-Type'] = b'text/plain' |
|
325 | res.headers[b'Content-Type'] = b'text/plain' | |
326 | res.setbodybytes( |
|
326 | res.setbodybytes( | |
327 |
_(b'insufficient permissions to execute |
|
327 | _(b'insufficient permissions to execute command: %s') | |
328 | % command[b'command'] |
|
328 | % command[b'command'] | |
329 | ) |
|
329 | ) | |
330 | return True |
|
330 | return True | |
@@ -340,7 +340,7 b' def _httpv2runcommand(' | |||||
340 | res.status = b'200 OK' |
|
340 | res.status = b'200 OK' | |
341 | res.headers[b'Content-Type'] = b'text/plain' |
|
341 | res.headers[b'Content-Type'] = b'text/plain' | |
342 | res.setbodybytes( |
|
342 | res.setbodybytes( | |
343 |
_(b'multiple commands cannot be issued to this |
|
343 | _(b'multiple commands cannot be issued to this URL') | |
344 | ) |
|
344 | ) | |
345 | return True |
|
345 | return True | |
346 |
|
346 | |||
@@ -725,13 +725,13 b' def wireprotocommand(' | |||||
725 |
|
725 | |||
726 | if not isinstance(args, dict): |
|
726 | if not isinstance(args, dict): | |
727 | raise error.ProgrammingError( |
|
727 | raise error.ProgrammingError( | |
728 |
b'arguments for version 2 commands |
|
728 | b'arguments for version 2 commands must be declared as dicts' | |
729 | ) |
|
729 | ) | |
730 |
|
730 | |||
731 | for arg, meta in args.items(): |
|
731 | for arg, meta in args.items(): | |
732 | if arg == b'*': |
|
732 | if arg == b'*': | |
733 | raise error.ProgrammingError( |
|
733 | raise error.ProgrammingError( | |
734 |
b'* argument name not allowed on |
|
734 | b'* argument name not allowed on version 2 commands' | |
735 | ) |
|
735 | ) | |
736 |
|
736 | |||
737 | if not isinstance(meta, dict): |
|
737 | if not isinstance(meta, dict): | |
@@ -773,7 +773,7 b' def wireprotocommand(' | |||||
773 | def register(func): |
|
773 | def register(func): | |
774 | if name in COMMANDS: |
|
774 | if name in COMMANDS: | |
775 | raise error.ProgrammingError( |
|
775 | raise error.ProgrammingError( | |
776 |
b'%s command already registered |
|
776 | b'%s command already registered for version 2' % name | |
777 | ) |
|
777 | ) | |
778 |
|
778 | |||
779 | COMMANDS[name] = wireprototypes.commandentry( |
|
779 | COMMANDS[name] = wireprototypes.commandentry( | |
@@ -890,7 +890,7 b' def resolvenodes(repo, revisions):' | |||||
890 |
|
890 | |||
891 | if not isinstance(revisions, list): |
|
891 | if not isinstance(revisions, list): | |
892 | raise error.WireprotoCommandError( |
|
892 | raise error.WireprotoCommandError( | |
893 |
b'revisions must be defined as an |
|
893 | b'revisions must be defined as an array' | |
894 | ) |
|
894 | ) | |
895 |
|
895 | |||
896 | for spec in revisions: |
|
896 | for spec in revisions: |