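Every hunk in this diff makes the same kind of change: a message that was written as two adjacent byte-string literals (which Python implicitly concatenates at compile time) is joined into a single literal. A minimal sketch of the pattern, with the split point of the original literal assumed for illustration:

    # Python joins adjacent (byte) string literals at compile time,
    # so both spellings produce the same message; the hunks below
    # simply collapse the split form into the joined form.
    split_form = b'manifest revision must be integer or full ' b'node'
    joined_form = b'manifest revision must be integer or full node'
    assert split_form == joined_form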
@@ -1379,7 +1379,7 @@ def perfmanifest(ui, repo, rev, manifest
- b'manifest revision must be integer or full
+ b'manifest revision must be integer or full node'

@@ -439,7 +439,7 @@ def _txnhook(ui, repo, hooktype, node, s
- _(b'acl: user "%s" denied on branch "%s"
+ _(b'acl: user "%s" denied on branch "%s" (changeset "%s")')

@@ -457,7 +457,7 @@ def _txnhook(ui, repo, hooktype, node, s
- _(b'acl: user "%s" denied on "%s"
+ _(b'acl: user "%s" denied on "%s" (changeset "%s")')

@@ -169,7 +169,7 @@ class converter_source(object):
- _(b'%s entry %s is not a valid revision
+ _(b'%s entry %s is not a valid revision identifier')

@@ -124,7 +124,7 @@ def createlog(ui, directory=None, root=b
- b"(Cannot access.+CVSROOT)|
+ b"(Cannot access.+CVSROOT)|(can't create temporary directory.+)$"

@@ -328,7 +328,7 @@ def createlog(ui, directory=None, root=b
- b'must have at least
+ b'must have at least some revisions'
@@ -563,7 +563,7 @@ def createlog(ui, directory=None, root=b
- b'check convert.cvsps.logencoding
+ b'check convert.cvsps.logencoding configuration'

@@ -573,9 +573,7 @@ def createlog(ui, directory=None, root=b
- hint=_(
- b'check convert.cvsps.logencoding' b' configuration'
- ),
+ hint=_(b'check convert.cvsps.logencoding configuration'),

@@ -259,7 +259,7 @@ class convert_git(common.converter_sourc
- _(b"warning: cannot read submodules config file in
+ _(b"warning: cannot read submodules config file in %s\n")

@@ -263,7 +263,7 @@ class monotone_source(common.converter_s
- _(b"convert from monotone does not support
+ _(b"convert from monotone does not support --full")

@@ -369,7 +369,7 @@ class monotone_source(common.converter_s
- _(b"unable to determine mtn automate interface
+ _(b"unable to determine mtn automate interface version")

@@ -189,7 +189,7 @@ def debugsvnlog(ui, **opts):
- _(b'debugsvnlog could not load Subversion python
+ _(b'debugsvnlog could not load Subversion python bindings')
@@ -647,7 +647,7 @@ class svn_source(converter_source):
- _(b'%s entry %s is not a valid revision
+ _(b'%s entry %s is not a valid revision identifier')

@@ -460,7 +460,7 @@ def reposetup(ui, repo):
- _(b"inconsistent newline style
+ _(b"inconsistent newline style in %s\n") % f

@@ -93,7 +93,7 @@ fastannotatecommandargs = {
- _(b'show line number at the first
+ _(b'show line number at the first appearance'),

@@ -127,7 +127,7 @@ fastannotatecommandargs = {
- _(b'rebuild cache even if it exists
+ _(b'rebuild cache even if it exists (EXPERIMENTAL)'),

@@ -116,7 +116,7 @@ def _fctxannotate(
- b'fastannotate: falling back to the vanilla
+ b'fastannotate: falling back to the vanilla annotate: %r\n' % ex

@@ -188,7 +188,7 @@ def fetch(ui, repo, source=b'default', *
- _(b'new changeset %d:%s merges remote changes
+ _(b'new changeset %d:%s merges remote changes with local\n')
@@ -423,7 +423,7 @@ def checknodescendants(repo, revs):
- _(b'can only fix a changeset together
+ _(b'can only fix a changeset together with all its descendants')

@@ -66,7 +66,7 @@ def githelp(ui, repo, *args, **kwargs):
- _(b'missing git command -
+ _(b'missing git command - usage: hg githelp -- <git command>')

@@ -76,7 +76,7 @@ class gpg(object):
- gpgcmd = b"%s --logger-fd 1 --status-fd 1 --verify
+ gpgcmd = b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\"" % (

@@ -179,12 +179,12 @@ def getkeys(ui, repo, mygpg, sigdata, co
- _(b"%s Note: Signature has expired
+ _(b"%s Note: Signature has expired (signed by: \"%s\")\n")
- _(b"%s Note: This key has expired
+ _(b"%s Note: This key has expired (signed by: \"%s\")\n")

@@ -304,7 +304,7 @@ def _dosign(ui, repo, *revs, **opts):
- _(b'uncommitted merge - please provide a
+ _(b'uncommitted merge - please provide a specific revision')
@@ -39,7 +39,7 @@ TextLexer = pygments.lexers.TextLexer
- b'\n<link rel="stylesheet" href="{url}highlightcss"
+ b'\n<link rel="stylesheet" href="{url}highlightcss" type="text/css" />'

@@ -1884,7 +1884,7 @@ def _validateargs(ui, repo, state, freea
- _(b'only --commands argument allowed with
+ _(b'only --commands argument allowed with --edit-plan')

@@ -70,7 +70,7 @@ class sqlindexapi(indexapi.indexapi):
- b"SQL cursor already open without
+ b"SQL cursor already open without connection"

@@ -126,7 +126,7 @@ class sqlindexapi(indexapi.indexapi):
- b"INSERT INTO bundles(bundle, reponame) VALUES
+ b"INSERT INTO bundles(bundle, reponame) VALUES (%s, %s)",

@@ -593,7 +593,7 @@ class queue(object):
- _(b'mq.git option can be auto/keep/yes/no
+ _(b'mq.git option can be auto/keep/yes/no got %s')

@@ -1113,7 +1113,7 @@ class queue(object):
- _(b"patch failed, rejects left in working
+ _(b"patch failed, rejects left in working directory\n")
@@ -1214,7 +1214,7 @@ class queue(object):
- _(b'qdelete requires at least one revision or
+ _(b'qdelete requires at least one revision or patch name')

@@ -1295,7 +1295,7 @@ class queue(object):
- _(b'patch name cannot begin or end with
+ _(b'patch name cannot begin or end with whitespace')

@@ -1629,11 +1629,11 @@ class queue(object):
- _(b'cannot use --exact and --move
+ _(b'cannot use --exact and --move together')
- _(b'cannot push --exact with applied
+ _(b'cannot push --exact with applied patches')

@@ -2390,7 +2390,7 @@ class queue(object):
- _(b'option "-r" not valid when importing
+ _(b'option "-r" not valid when importing files')

@@ -2398,7 +2398,7 @@ class queue(object):
- _(b'option "-n" not valid when importing multiple
+ _(b'option "-n" not valid when importing multiple patches')
@@ -2408,7 +2408,7 @@ class queue(object):
- _(b'revision %d is the root of more than one
+ _(b'revision %d is the root of more than one branch')

@@ -2419,7 +2419,7 @@ class queue(object):
- _(b'revision %d is not the parent of
+ _(b'revision %d is not the parent of the queue')

@@ -2447,7 +2447,7 @@ class queue(object):
- _(b'revision %d is not the parent of
+ _(b'revision %d is not the parent of %d')

@@ -2849,7 +2849,7 @@ def clone(ui, source, dest=None, **opts)
- _(b'versioned patch repository not found
+ _(b'versioned patch repository not found (see init --mq)')

@@ -3359,7 +3359,7 @@ def guard(ui, repo, *args, **opts):
- _(b'cannot mix -l/--list with options or
+ _(b'cannot mix -l/--list with options or arguments')

@@ -3671,13 +3671,12 @@ def save(ui, repo, **opts):
- _(b'destination %s exists and is not
+ _(b'destination %s exists and is not a directory')
- _(b'destination %s exists,
- % newpath
+ _(b'destination %s exists, use -f to force') % newpath
@@ -4157,7 +4156,7 @@ def reposetup(ui, repo):
- _(b'tag %s overrides mq patch of the same
+ _(b'tag %s overrides mq patch of the same name\n')

@@ -4191,13 +4190,13 @@ def mqinit(orig, ui, *args, **kwargs):
- _(b'only a local queue repository
+ _(b'only a local queue repository may be initialized')
- _(b'there is no Mercurial repository here
+ _(b'there is no Mercurial repository here (.hg not found)')

@@ -249,7 +249,7 @@ def _narrow(
- hint=_(b'use --force-delete-local-changes to
+ hint=_(b'use --force-delete-local-changes to ignore'),

@@ -377,7 +377,7 @@ class notifier(object):
- b'notify: no subscribers to selected repo
+ b'notify: no subscribers to selected repo and revset\n'

@@ -162,7 +162,7 @@ def _addpullheader(seq, ctx):
- return b'Available At %s\n
+ return b'Available At %s\n# hg pull %s -r %s' % (
@@ -343,9 +343,7 @@ def _getpatches(repo, revs, **opts):
- ui.warn(
- _(b'warning: working directory has ' b'uncommitted changes\n')
- )
+ ui.warn(_(b'warning: working directory has uncommitted changes\n'))

@@ -391,7 +389,7 @@ def _getdescription(repo, defaultbody, s
- _(b'\nWrite the introductory message for the
+ _(b'\nWrite the introductory message for the patch series.\n\n')

@@ -911,7 +909,7 @@ def email(ui, repo, *revs, **opts):
- _(b'are you sure you want to send (yn)?
+ _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')

@@ -830,7 +830,7 @@ def _confirmbeforesend(repo, revs, oldma
- _(b'Send the above changes to %s (yn)?
+ _(b'Send the above changes to %s (yn)?$$ &Yes $$ &No') % url

@@ -469,7 +469,7 @@ class rebaseruntime(object):
- _(b'cannot collapse multiple named
+ _(b'cannot collapse multiple named branches')
@@ -589,14 +589,11 @@ class rebaseruntime(object):
- msg = (
- _(b'note: not rebasing %s, it has no ' b'successor\n')
- % desc
- )
+ msg = _(b'note: not rebasing %s, it has no successor\n') % desc
- b'note: not rebasing %s, already in
+ b'note: not rebasing %s, already in destination as %s\n'

@@ -1111,7 +1108,7 @@ def _dryrunrebase(ui, repo, action, opts
- _(b'starting dry-run rebase; repository will not be
+ _(b'starting dry-run rebase; repository will not be changed\n')

@@ -1136,9 +1133,7 @@ def _dryrunrebase(ui, repo, action, opts
- if not ui.promptchoice(
- _(b'apply changes (yn)?' b'$$ &Yes $$ &No')
- ):
+ if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):

@@ -1415,8 +1410,7 @@ def _definedestmap(
- _(b'rebase destination for %s is not
- % repo[r]
+ _(b'rebase destination for %s is not unique') % repo[r]

@@ -1638,7 +1632,7 @@ def _checkobsrebase(repo, ui, rebaseobsr
- msg = _(b"this rebase will cause
+ msg = _(b"this rebase will cause divergences from: %s")
@@ -300,7 +300,7 @@ def checkadmonitions(ui, repo, directive
- _(b"Invalid admonition '%s' present in changeset %s
+ _(b"Invalid admonition '%s' present in changeset %s\n")

@@ -376,7 +376,7 @@ def parsenotesfromrevisions(repo, direct
- _(b"error parsing releasenotes for revision:
+ _(b"error parsing releasenotes for revision: '%s'\n")

@@ -422,7 +422,7 @@ def parsereleasenotesfile(sections, text
- _(b'unexpected block type in release notes:
+ _(b'unexpected block type in release notes: %s')

@@ -273,7 +273,7 @@ def uisetup(ui):
- _(b"create a shallow clone which uses remote file
+ _(b"create a shallow clone which uses remote file history"),

@@ -796,8 +796,7 @@ def walkfilerevs(orig, repo, match, foll
- _(b'cannot follow file not in parent
- % filename
+ _(b'cannot follow file not in parent revision: "%s"') % filename

@@ -137,7 +137,7 @@ class unioncontentstore(basestore.baseun
- b"cannot add content only to remotefilelog
+ b"cannot add content only to remotefilelog contentstore"
@@ -209,7 +209,7 @@ class remotefilelogcontentstore(basestor
- b"cannot add content only to remotefilelog
+ b"cannot add content only to remotefilelog contentstore"

@@ -414,7 +414,7 @@ class fileserverclient(object):
- b'remotefilelog requires ssh
+ b'remotefilelog requires ssh servers'

@@ -99,7 +99,7 @@ class unionmetadatastore(basestore.baseu
- b"cannot add content only to remotefilelog
+ b"cannot add content only to remotefilelog contentstore"

@@ -136,7 +136,7 @@ class remotefilelogmetadatastore(basesto
- b"cannot add metadata only to remotefilelog
+ b"cannot add metadata only to remotefilelog metadatastore"

@@ -194,7 +194,7 @@ def onetimesetup(ui):
- _(b"Cannot clone from a shallow repo
+ _(b"Cannot clone from a shallow repo to a full repo.")

@@ -55,7 +55,7 @@ def getcachepath(ui, allowempty=False):
- _(b"could not find config option
+ _(b"could not find config option remotefilelog.cachepath")
@@ -427,7 +427,7 @@ def readexactly(stream, n):
- _(b"stream ended unexpectedly
+ _(b"stream ended unexpectedly (got %d bytes, expected %d)")

@@ -70,7 +70,7 @@ testedwith = b'ships-with-hg-core'
- _(b'point to source using a relative path
+ _(b'point to source using a relative path (EXPERIMENTAL)'),

@@ -108,7 +108,7 @@ def split(ui, repo, *revs, **opts):
- _(b'split would leave orphaned changesets
+ _(b'split would leave orphaned changesets behind')

@@ -164,7 +164,7 @@ def dosplit(ui, repo, tr, ctx, opts):
- b'HG: Write commit message for the next split
+ b'HG: Write commit message for the next split changeset.\n'

@@ -358,7 +358,7 @@ class sqlitefilestore(object):
- _(b'sqlite database has inconsistent
+ _(b'sqlite database has inconsistent revision numbers')

@@ -772,7 +772,7 @@ class sqlitefilestore(object):
- _(b'censor tombstone must be no longer than
+ _(b'censor tombstone must be no longer than censored data')
@@ -1161,7 +1161,7 @@ def newreporequirements(orig, ui, create
- _(b'shared repositories not supported with SQLite
+ _(b'shared repositories not supported with SQLite store')

@@ -1176,7 +1176,7 @@ def newreporequirements(orig, ui, create
- _(b'SQLite store does not support repo creation
+ _(b'SQLite store does not support repo creation option: %s')

@@ -124,24 +124,19 @@ def strip(
- (
- b'',
- b'nobackup',
- None,
- _(b'do not save backup bundle ' b'(DEPRECATED)'),
- ),
+ (b'', b'nobackup', None, _(b'do not save backup bundle (DEPRECATED)'),),
- _(b"do not modify working directory during
+ _(b"do not modify working directory during strip"),
- _(b"remove revs only reachable from given
+ _(b"remove revs only reachable from given bookmark"),
@@ -438,7 +438,7 @@ class transplanter(object):
- _(b'working directory not at transplant
+ _(b'working directory not at transplant parent %s')

@@ -661,7 +661,7 @@ def browserevs(ui, repo, nodes, opts):
- _(b'continue last transplant session
+ _(b'continue last transplant session after fixing conflicts'),

@@ -788,7 +788,7 @@ def _dotransplant(ui, repo, *revs, **opt
- _(b'--all is incompatible with a
+ _(b'--all is incompatible with a revision list')

@@ -165,7 +165,7 @@ def uncommit(ui, repo, *pats, **opts):
- hint=_(b'requires
+ hint=_(b'requires --allow-dirty-working-copy to uncommit'),

@@ -190,7 +190,7 @@ def uncommit(ui, repo, *pats, **opts):
- b"file was not changed in working directory
+ b"file was not changed in working directory parent"

@@ -128,7 +128,7 @@ def basewrapper(func, argtype, enc, dec,
- _(b"[win32mbcs] filename conversion failed with
+ _(b"[win32mbcs] filename conversion failed with %s encoding\n")
@@ -302,7 +302,7 @@ class bmstore(object):
- _(b"bookmark '%s' already exists
+ _(b"bookmark '%s' already exists (use -f to force)") % mark

@@ -893,7 +893,7 @@ def checkformat(repo, mark):
- _(b"bookmark names cannot consist entirely of
+ _(b"bookmark names cannot consist entirely of whitespace")

@@ -1233,7 +1233,7 @@ class interrupthandler(unpackermixin):
- b'bundle2-input-stream-interrupt:
+ b'bundle2-input-stream-interrupt: opening out of band context\n'

@@ -1252,7 +1252,7 @@ class interrupthandler(unpackermixin):
- b'bundle2-input-stream-interrupt:
+ b'bundle2-input-stream-interrupt: closing out of band context\n'

@@ -1320,7 +1320,7 @@ def decodepayloadchunks(ui, fh):
- _(b'stream ended unexpectedly
+ _(b'stream ended unexpectedly (got %d bytes, expected %d)')

@@ -1889,7 +1889,7 @@ def writebundle(
- _(b'old bundle types only supports v1
+ _(b'old bundle types only supports v1 changegroups')
@@ -2136,7 +2136,7 @@ def handlecheckheads(op, inpart):
- b'remote repository changed while pushing -
+ b'remote repository changed while pushing - please try again'

@@ -269,7 +269,7 @@ class bundlerepository(object):
- b"can't process
+ b"can't process multiple changegroups"

@@ -1551,7 +1551,7 @@ def getbundler(
- b'version 01 changegroups do not support
+ b'version 01 changegroups do not support sparse file matchers'

@@ -257,13 +257,9 @@ def resolvecommitoptions(ui, opts):
- raise error.Abort(
- _(b'--date and --currentdate are mutually ' b'exclusive')
- )
+ raise error.Abort(_(b'--date and --currentdate are mutually exclusive'))
- raise error.Abort(
- _(b'--user and --currentuser are mutually ' b'exclusive')
- )
+ raise error.Abort(_(b'--user and --currentuser are mutually exclusive'))

@@ -1010,7 +1006,7 @@ def logmessage(ui, opts):
- _(b'options --message and --logfile are mutually
+ _(b'options --message and --logfile are mutually exclusive')
@@ -1180,7 +1176,7 @@ def _buildfntemplate(pat, total=None, se
- _(b"incomplete format spec in output
+ _(b"incomplete format spec in output filename")

@@ -1188,7 +1184,7 @@ def _buildfntemplate(pat, total=None, se
- _(b"invalid format spec '%%%s' in output
+ _(b"invalid format spec '%%%s' in output filename") % c

@@ -2264,7 +2260,7 @@ def walkchangerevs(repo, match, opts, pr
- _(b'can only follow copies/renames for explicit
+ _(b'can only follow copies/renames for explicit filenames')

@@ -830,7 +830,7 @@ def _dobackout(ui, repo, node=None, rev=
- _(b"use 'hg resolve' to retry unresolved
+ _(b"use 'hg resolve' to retry unresolved file merges\n")

@@ -839,7 +839,7 @@ def _dobackout(ui, repo, node=None, rev=
- msg = _(b"changeset %s backed out,
+ msg = _(b"changeset %s backed out, don't forget to commit.\n")

@@ -1353,7 +1353,7 @@ def branch(ui, repo, label=None, **opts)
- _(b'a branch of the same name already
+ _(b'a branch of the same name already exists'),
@@ -1562,9 +1562,7 @@ def bundle(ui, repo, fname, dest=None, *
- hint=_(
- b"see 'hg help bundlespec' for supported " b"values for --type"
- ),
+ hint=_(b"see 'hg help bundlespec' for supported values for --type"),

@@ -1578,7 +1576,7 @@ def bundle(ui, repo, fname, dest=None, *
- _(b"--all is incompatible with specifying
+ _(b"--all is incompatible with specifying a destination")

@@ -1593,7 +1591,7 @@ def bundle(ui, repo, fname, dest=None, *
- _(b"--base is incompatible with specifying
+ _(b"--base is incompatible with specifying a destination")

@@ -2048,7 +2046,7 @@ def _docommit(ui, repo, *pats, **opts):
- _(b'current revision is already a branch closing
+ _(b'current revision is already a branch closing head')

@@ -2298,7 +2296,7 @@ def continuecmd(ui, repo, **opts):
- _(b"%s in progress but does not support
+ _(b"%s in progress but does not support 'hg continue'")
@@ -2987,19 +2985,19 @@ def _dograft(ui, repo, *revs, **opts):
- _(b"cannot specify --no-commit and
+ _(b"cannot specify --no-commit and --edit together")
- _(b"cannot specify --no-commit and
+ _(b"cannot specify --no-commit and --currentuser together")
- _(b"cannot specify --no-commit and
+ _(b"cannot specify --no-commit and --currentdate together")
- _(b"cannot specify --no-commit and
+ _(b"cannot specify --no-commit and --log together")

@@ -3007,7 +3005,7 @@ def _dograft(ui, repo, *revs, **opts):
- _(b"cannot use '--continue' and
+ _(b"cannot use '--continue' and '--stop' together")

@@ -3028,7 +3026,7 @@ def _dograft(ui, repo, *revs, **opts):
- _(b"cannot use '--continue' and
+ _(b"cannot use '--continue' and '--abort' together")

@@ -3866,7 +3864,7 @@ def identify(
- _(b"there is no Mercurial repository here
+ _(b"there is no Mercurial repository here (.hg not found)")
@@ -4904,7 +4902,7 @@ statemod.addunfinished(
- b'To continue: hg commit\n
+ b'To continue: hg commit\nTo abort: hg merge --abort'

@@ -5305,7 +5303,7 @@ def postincoming(ui, repo, modheads, opt
- _(b"(run 'hg heads .' to see heads, 'hg merge' to
+ _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n")

@@ -5615,7 +5613,7 @@ def push(ui, repo, dest=None, **opts):
- _(b'default push revset for path evaluates to an
+ _(b'default push revset for path evaluates to an empty set')

@@ -5882,7 +5880,7 @@ def resolve(ui, repo, *pats, **opts):
- _(b're-merge all unresolved files (yn)?
+ _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')

@@ -6474,7 +6472,7 @@ def serve(ui, repo, **opts):
- _(b"there is no Mercurial repository here
+ _(b"there is no Mercurial repository here (.hg not found)")

@@ -7234,7 +7232,7 @@ def tag(ui, repo, name1, *names, **opts)
- _(b'tag names cannot consist entirely of
+ _(b'tag names cannot consist entirely of whitespace')
@@ -7269,7 +7267,7 @@ def tag(ui, repo, name1, *names, **opts)
- _(b"tag '%s' already exists
+ _(b"tag '%s' already exists (use -f to force)") % n

@@ -1619,7 +1619,7 @@ class workingctx(committablectx):
- _(b"copy failed: %s is not a file or a
+ _(b"copy failed: %s is not a file or a symbolic link\n")

@@ -1678,7 +1678,7 @@ class workingctx(committablectx):
- b'ignoring suspect symlink placeholder
+ b'ignoring suspect symlink placeholder "%s"\n' % f

@@ -1750,7 +1750,7 @@ class workingctx(committablectx):
- b'skip updating dirstate:
+ b'skip updating dirstate: identity mismatch\n'

@@ -677,7 +677,7 @@ def _fullcopytracing(repo, c1, c2, base)
-
+ b" pending file src: '%s' -> dst: '%s'\n"

@@ -275,7 +275,7 @@ def parsedag(desc):
- _(b'invalid character in dag description:
+ _(b'invalid character in dag description: %s...') % s
@@ -381,7 +381,7 @@ def dagtextlines(
- _(b"invalid event type in dag:
+ _(b"invalid event type in dag: ('%s', '%s')")

@@ -109,7 +109,7 @@ def debugancestor(ui, repo, *args):
- _(b'there is no Mercurial repository here
+ _(b'there is no Mercurial repository here (.hg not found)')

@@ -1464,7 +1464,7 @@ def debuginstall(ui, **opts):
- _(b" %s\n
+ _(b" %s\n (check that your locale is properly set)\n"),

@@ -1577,7 +1577,7 @@ def debuginstall(ui, **opts):
- _(b'checking available compression engines
+ _(b'checking available compression engines (%s)\n'),

@@ -1701,7 +1701,7 @@ def debuginstall(ui, **opts):
- _(b"%d problems detected,
+ _(b"%d problems detected, please check your install!\n"),

@@ -2071,7 +2071,7 @@ def debugnamecomplete(ui, repo, *args):
- _(b'restrict display to markers only
+ _(b'restrict display to markers only relevant to REV'),
@@ -2115,7 +2115,7 @@ def debugobsolete(ui, repo, precursor=No
- _(b'cannot delete obsmarkers in the middle
+ _(b'cannot delete obsmarkers in the middle of transaction.')

@@ -2949,7 +2949,7 @@ def debugrevlogindex(ui, repo, file_=Non
-
+ b" rev offset length linkrev %s %s p2\n"

@@ -3096,7 +3096,7 @@ def debugrevspec(ui, repo, expr, **opts)
- _(b'cannot use --verify-optimized with
+ _(b'cannot use --verify-optimized with --no-optimized')

@@ -3276,7 +3276,7 @@ def debugssl(ui, repo, source=None, **op
- _(b'certificate chain building is only possible on
+ _(b'certificate chain building is only possible on Windows')

@@ -3422,7 +3422,7 @@ def debugtemplate(ui, repo, tmpl, **opts
- _(b'there is no Mercurial repository here
+ _(b'there is no Mercurial repository here (.hg not found)')

@@ -3882,9 +3882,7 @@ def debugwireproto(ui, repo, path=None,
- raise error.Abort(
- _(b'cannot specify --localssh with an explicit ' b'path')
- )
+ raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
@@ -4153,7 +4151,7 @@ def debugwireproto(ui, repo, path=None,
- _(b'cannot use httprequest without an HTTP
+ _(b'cannot use httprequest without an HTTP peer')

@@ -129,9 +129,9 @@ def _destupdatebranchfallback(repo, clea
- assert
- b"any revision has at least " b"one descendant branch head"
- )
+ assert (
+ node is not None
+ ), b"any revision has at least one descendant branch head"

@@ -467,7 +467,7 @@ class dirstate(object):
- _(b"setting %r to other parent
+ _(b"setting %r to other parent only allowed in merges") % f

@@ -1470,7 +1470,7 @@ class dirstatemap(object):
- _(b'working directory state may be
+ _(b'working directory state may be changed parallelly')

@@ -1494,7 +1494,7 @@ class dirstatemap(object):
- _(b'working directory state appears
+ _(b'working directory state appears damaged!')

@@ -1671,7 +1671,7 @@ if rustmod is not None:
- _(b'working directory state may be
+ _(b'working directory state may be changed parallelly')
@@ -1697,7 +1697,7 b' if rustmod is not None:' | |||
|
1697 | 1697 | self._parents = self._rustmap.parents(st) |
|
1698 | 1698 | except ValueError: |
|
1699 | 1699 | raise error.Abort( |
|
1700 |
_(b'working directory state appears |
|
|
1700 | _(b'working directory state appears damaged!') | |
|
1701 | 1701 | ) |
|
1702 | 1702 | |
|
1703 | 1703 | return self._parents |
@@ -412,11 +412,11 b' def checkheads(pushop):' | |||
|
412 | 412 | heads = scmutil.nodesummaries(repo, unsyncedheads) |
|
413 | 413 | if heads is None: |
|
414 | 414 | repo.ui.status( |
|
415 |
_(b"remote has heads that are |
|
|
415 | _(b"remote has heads that are not known locally\n") | |
|
416 | 416 | ) |
|
417 | 417 | elif branch is None: |
|
418 | 418 | repo.ui.status( |
|
419 |
_(b"remote has heads that are |
|
|
419 | _(b"remote has heads that are not known locally: %s\n") | |
|
420 | 420 | % heads |
|
421 | 421 | ) |
|
422 | 422 | else: |
@@ -447,7 +447,7 b' def checkheads(pushop):' | |||
|
447 | 447 | if errormsg is None: |
|
448 | 448 | if branch not in (b'default', None): |
|
449 | 449 | errormsg = _( |
|
450 |
b"push creates new remote head %s |
|
|
450 | b"push creates new remote head %s on branch '%s'!" | |
|
451 | 451 | ) % (short(dhs[0]), branch) |
|
452 | 452 | elif repo[dhs[0]].bookmarks(): |
|
453 | 453 | errormsg = _( |
@@ -631,7 +631,7 b' class cmdalias(object):' | |||
|
631 | 631 | continue |
|
632 | 632 | if not encoding.isasciistr(v): |
|
633 | 633 | self.badalias = _( |
|
634 |
b"non-ASCII character in alias definition |
|
|
634 | b"non-ASCII character in alias definition '%s:%s'" | |
|
635 | 635 | ) % (name, k) |
|
636 | 636 | return |
|
637 | 637 | cfg[k] = v |
@@ -319,7 +319,7 b' def getbundlespec(ui, fh):' | |||
|
319 | 319 | b'a known bundlespec' |
|
320 | 320 | ) |
|
321 | 321 | % version, |
|
322 |
hint=_(b'try upgrading your Mercurial |
|
|
322 | hint=_(b'try upgrading your Mercurial client'), | |
|
323 | 323 | ) |
|
324 | 324 | elif part.type == b'stream2' and version is None: |
|
325 | 325 | # A stream2 part requires to be part of a v2 bundle |
@@ -330,7 +330,7 b' def getbundlespec(ui, fh):' | |||
|
330 | 330 | |
|
331 | 331 | if not version: |
|
332 | 332 | raise error.Abort( |
|
333 |
_(b'could not identify changegroup version in |
|
|
333 | _(b'could not identify changegroup version in bundle') | |
|
334 | 334 | ) |
|
335 | 335 | |
|
336 | 336 | return b'%s-%s' % (comp, version) |
@@ -383,7 +383,7 b' def _checkpublish(pushop):' | |||
|
383 | 383 | ) |
|
384 | 384 | elif behavior == b'confirm': |
|
385 | 385 | if ui.promptchoice( |
|
386 |
_(b'push and publish %i changesets (yn)? |
|
|
386 | _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No') | |
|
387 | 387 | % len(published) |
|
388 | 388 | ): |
|
389 | 389 | raise error.Abort(_(b'user quit')) |
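The prompt literal joined in this hunk (and the similar prompt hunks further down) keeps Mercurial's choice syntax intact: ui.promptchoice() takes a single string in which the question is followed by '$$'-separated choices, each with '&' marking its shortcut letter. A rough standalone sketch of that convention, using a hypothetical helper rather than the real ui code:

    def parsechoices(prompt):
        # e.g. b'push and publish %i changesets (yn)?$$ &Yes $$ &No'
        parts = [p.strip() for p in prompt.split(b'$$')]
        question, choices = parts[0], parts[1:]
        # the shortcut key is the character right after '&', lowercased
        keys = [c[c.index(b'&') + 1:c.index(b'&') + 2].lower() for c in choices]
        return question, choices, keys

    parsechoices(b'push and publish 3 changesets (yn)?$$ &Yes $$ &No')
    # -> (b'push and publish 3 changesets (yn)?', [b'&Yes', b'&No'], [b'y', b'n'])

Joining the literals therefore cannot change behaviour as long as the '$$' markers survive, which the new single-string form preserves verbatim.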
@@ -1166,8 +1166,7 b' def _abortonsecretctx(pushop, node, b):' | |||
|
1166 | 1166 | """abort if a given bookmark points to a secret changeset""" |
|
1167 | 1167 | if node and pushop.repo[node].phase() == phases.secret: |
|
1168 | 1168 | raise error.Abort( |
|
1169 |
_(b'cannot push bookmark %s as it points to a secret' b |
|
|
1170 | % b | |
|
1169 | _(b'cannot push bookmark %s as it points to a secret changeset') % b | |
|
1171 | 1170 | ) |
|
1172 | 1171 | |
|
1173 | 1172 | |
@@ -2696,7 +2695,7 b' def check_heads(repo, their_heads, conte' | |||
|
2696 | 2695 | # someone else committed/pushed/unbundled while we |
|
2697 | 2696 | # were transferring data |
|
2698 | 2697 | raise error.PushRaced( |
|
2699 |
b'repository changed while %s - |
|
|
2698 | b'repository changed while %s - please try again' % context | |
|
2700 | 2699 | ) |
|
2701 | 2700 | |
|
2702 | 2701 | |
@@ -2842,7 +2841,7 b' def _maybeapplyclonebundle(pullop):' | |||
|
2842 | 2841 | ) |
|
2843 | 2842 | ) |
|
2844 | 2843 | repo.ui.warn( |
|
2845 |
_(b'(you may want to report this to the server |
|
|
2844 | _(b'(you may want to report this to the server operator)\n') | |
|
2846 | 2845 | ) |
|
2847 | 2846 | return |
|
2848 | 2847 |
@@ -219,7 +219,7 b' def _fetchrawstorefiles(repo, remote):' | |||
|
219 | 219 | vfs = repo.svfs |
|
220 | 220 | else: |
|
221 | 221 | raise error.Abort( |
|
222 |
_(b'invalid location for raw file data: |
|
|
222 | _(b'invalid location for raw file data: %s') | |
|
223 | 223 | % filemeta[b'location'] |
|
224 | 224 | ) |
|
225 | 225 |
@@ -315,7 +315,7 b' def _iprompt(repo, mynode, orig, fcd, fc' | |||
|
315 | 315 | # conflicts. |
|
316 | 316 | if fcd.changectx().isinmemory(): |
|
317 | 317 | raise error.InMemoryMergeConflictsError( |
|
318 |
b'in-memory merge does not |
|
|
318 | b'in-memory merge does not support file conflicts' | |
|
319 | 319 | ) |
|
320 | 320 | |
|
321 | 321 | prompts = partextras(labels) |
@@ -415,7 +415,7 b' def _premerge(repo, fcd, fco, fca, toolc' | |||
|
415 | 415 | if premerge not in validkeep: |
|
416 | 416 | _valid = b', '.join([b"'" + v + b"'" for v in validkeep]) |
|
417 | 417 | raise error.ConfigError( |
|
418 |
_(b"%s.premerge not valid |
|
|
418 | _(b"%s.premerge not valid ('%s' is neither boolean nor %s)") | |
|
419 | 419 | % (tool, premerge, _valid) |
|
420 | 420 | ) |
|
421 | 421 | |
@@ -440,7 +440,7 b' def _mergecheck(repo, mynode, orig, fcd,' | |||
|
440 | 440 | uipathfn = scmutil.getuipathfn(repo) |
|
441 | 441 | if symlink: |
|
442 | 442 | repo.ui.warn( |
|
443 |
_(b'warning: internal %s cannot merge symlinks |
|
|
443 | _(b'warning: internal %s cannot merge symlinks for %s\n') | |
|
444 | 444 | % (tool, uipathfn(fcd.path())) |
|
445 | 445 | ) |
|
446 | 446 | return False |
@@ -606,7 +606,7 b' def _idump(repo, mynode, orig, fcd, fco,' | |||
|
606 | 606 | |
|
607 | 607 | if isinstance(fcd, context.overlayworkingfilectx): |
|
608 | 608 | raise error.InMemoryMergeConflictsError( |
|
609 |
b'in-memory merge does not |
|
|
609 | b'in-memory merge does not support the :dump tool.' | |
|
610 | 610 | ) |
|
611 | 611 | |
|
612 | 612 | util.writefile(a + b".local", fcd.decodeddata()) |
@@ -635,7 +635,7 b' def _xmergeimm(repo, mynode, orig, fcd, ' | |||
|
635 | 635 | # directory and tell the user how to get it is my best idea, but it's |
|
636 | 636 | # clunky.) |
|
637 | 637 | raise error.InMemoryMergeConflictsError( |
|
638 |
b'in-memory merge does not support |
|
|
638 | b'in-memory merge does not support external merge tools' | |
|
639 | 639 | ) |
|
640 | 640 | |
|
641 | 641 | |
@@ -698,7 +698,7 b' def _xmerge(repo, mynode, orig, fcd, fco' | |||
|
698 | 698 | uipathfn = scmutil.getuipathfn(repo) |
|
699 | 699 | if fcd.isabsent() or fco.isabsent(): |
|
700 | 700 | repo.ui.warn( |
|
701 |
_(b'warning: %s cannot merge change/delete conflict |
|
|
701 | _(b'warning: %s cannot merge change/delete conflict for %s\n') | |
|
702 | 702 | % (tool, uipathfn(fcd.path())) |
|
703 | 703 | ) |
|
704 | 704 | return False, 1, None |
@@ -1064,7 +1064,7 b' def _filemerge(premerge, repo, wctx, myn' | |||
|
1064 | 1064 | if onfailure: |
|
1065 | 1065 | if wctx.isinmemory(): |
|
1066 | 1066 | raise error.InMemoryMergeConflictsError( |
|
1067 |
b'in-memory merge does |
|
|
1067 | b'in-memory merge does not support merge conflicts' | |
|
1068 | 1068 | ) |
|
1069 | 1069 | ui.warn(onfailure % fduipath) |
|
1070 | 1070 | return True, 1, False |
@@ -1150,7 +1150,7 b' def _haltmerge():' | |||
|
1150 | 1150 | def _onfilemergefailure(ui): |
|
1151 | 1151 | action = ui.config(b'merge', b'on-failure') |
|
1152 | 1152 | if action == b'prompt': |
|
1153 |
msg = _(b'continue merge operation (yn)? |
|
|
1153 | msg = _(b'continue merge operation (yn)?$$ &Yes $$ &No') | |
|
1154 | 1154 | if ui.promptchoice(msg, 0) == 1: |
|
1155 | 1155 | _haltmerge() |
|
1156 | 1156 | if action == b'halt': |
@@ -1180,7 +1180,7 b' def _check(repo, r, ui, tool, fcd, files' | |||
|
1180 | 1180 | if b'prompt' in _toollist(ui, tool, b"check"): |
|
1181 | 1181 | checked = True |
|
1182 | 1182 | if ui.promptchoice( |
|
1183 |
_(b"was merge of '%s' successful (yn)? |
|
|
1183 | _(b"was merge of '%s' successful (yn)?$$ &Yes $$ &No") | |
|
1184 | 1184 | % uipathfn(fd), |
|
1185 | 1185 | 1, |
|
1186 | 1186 | ): |
@@ -860,7 +860,7 b' def help_(' | |||
|
860 | 860 | ) |
|
861 | 861 | if name == b'shortlist': |
|
862 | 862 | rst.append( |
|
863 |
_(b"\n(use 'hg help' for the full list |
|
|
863 | _(b"\n(use 'hg help' for the full list of commands)\n") | |
|
864 | 864 | ) |
|
865 | 865 | else: |
|
866 | 866 | if name == b'shortlist': |
@@ -872,7 +872,7 b' def help_(' | |||
|
872 | 872 | ) |
|
873 | 873 | elif name and not full: |
|
874 | 874 | rst.append( |
|
875 |
_(b"\n(use 'hg help %s' to show the full help |
|
|
875 | _(b"\n(use 'hg help %s' to show the full help text)\n") | |
|
876 | 876 | % name |
|
877 | 877 | ) |
|
878 | 878 | elif name and syns and name in syns.keys(): |
@@ -929,7 +929,7 b' def help_(' | |||
|
929 | 929 | try: |
|
930 | 930 | cmdutil.findcmd(name, commands.table) |
|
931 | 931 | rst.append( |
|
932 |
_(b"\nuse 'hg help -c %s' to see help for |
|
|
932 | _(b"\nuse 'hg help -c %s' to see help for the %s command\n") | |
|
933 | 933 | % (name, name) |
|
934 | 934 | ) |
|
935 | 935 | except error.UnknownCommand: |
@@ -985,7 +985,7 b' def help_(' | |||
|
985 | 985 | doc = doc.splitlines()[0] |
|
986 | 986 | |
|
987 | 987 | rst = listexts( |
|
988 |
_(b"'%s' is provided by the following |
|
|
988 | _(b"'%s' is provided by the following extension:") % cmd, | |
|
989 | 989 | {ext: doc}, |
|
990 | 990 | indent=4, |
|
991 | 991 | showdeprecated=True, |
@@ -379,7 +379,7 b' def postshare(sourcerepo, destrepo, defa' | |||
|
379 | 379 | """ |
|
380 | 380 | default = defaultpath or sourcerepo.ui.config(b'paths', b'default') |
|
381 | 381 | if default: |
|
382 |
template = b'[paths]\n |
|
|
382 | template = b'[paths]\ndefault = %s\n' | |
|
383 | 383 | destrepo.vfs.write(b'hgrc', util.tonativeeol(template % default)) |
|
384 | 384 | if repositorymod.NARROW_REQUIREMENT in sourcerepo.requirements: |
|
385 | 385 | with destrepo.wlock(): |
@@ -1182,9 +1182,7 b' def abortmerge(ui, repo):' | |||
|
1182 | 1182 | # there were no conficts, mergestate was not stored |
|
1183 | 1183 | node = repo[b'.'].hex() |
|
1184 | 1184 | |
|
1185 | repo.ui.status( | |
|
1186 | _(b"aborting the merge, updating back to" b" %s\n") % node[:12] | |
|
1187 | ) | |
|
1185 | repo.ui.status(_(b"aborting the merge, updating back to %s\n") % node[:12]) | |
|
1188 | 1186 | stats = mergemod.update(repo, node, branchmerge=False, force=True) |
|
1189 | 1187 | _showstats(repo, stats) |
|
1190 | 1188 | return stats.unresolvedcount > 0 |
@@ -119,6 +119,6 b' def createapp(baseui, repo, webconf):' | |||
|
119 | 119 | else: |
|
120 | 120 | if not repo: |
|
121 | 121 | raise error.RepoError( |
|
122 |
_(b"there is no Mercurial repository |
|
|
122 | _(b"there is no Mercurial repository here (.hg not found)") | |
|
123 | 123 | ) |
|
124 | 124 | return hgweb_mod.hgweb(repo, baseui=baseui) |
@@ -516,7 +516,7 b' class wsgiresponse(object):' | |||
|
516 | 516 | |
|
517 | 517 | if self._bodygen is not None or self._bodywillwrite: |
|
518 | 518 | raise error.ProgrammingError( |
|
519 |
b"must use setbodybytes('') with |
|
|
519 | b"must use setbodybytes('') with 304 responses" | |
|
520 | 520 | ) |
|
521 | 521 | |
|
522 | 522 | # Various HTTP clients (notably httplib) won't read the HTTP response |
@@ -242,7 +242,7 b' class _httprequesthandler(httpservermod.' | |||
|
242 | 242 | def send_headers(self): |
|
243 | 243 | if not self.saved_status: |
|
244 | 244 | raise AssertionError( |
|
245 |
b"Sending headers before |
|
|
245 | b"Sending headers before start_response() called" | |
|
246 | 246 | ) |
|
247 | 247 | saved_status = self.saved_status.split(None, 1) |
|
248 | 248 | saved_status[0] = int(saved_status[0]) |
@@ -1295,7 +1295,7 b' def archive(web):' | |||
|
1295 | 1295 | web.res.setbodywillwrite() |
|
1296 | 1296 | if list(web.res.sendresponse()): |
|
1297 | 1297 | raise error.ProgrammingError( |
|
1298 |
b'sendresponse() should not emit data |
|
|
1298 | b'sendresponse() should not emit data if writing later' | |
|
1299 | 1299 | ) |
|
1300 | 1300 | |
|
1301 | 1301 | bodyfh = web.res.getbodyfile() |
@@ -117,7 +117,7 b' def pythonhook(ui, repo, htype, hname, f' | |||
|
117 | 117 | ui.warn(_(b'error: %s hook failed: %s\n') % (hname, exc.args[0])) |
|
118 | 118 | else: |
|
119 | 119 | ui.warn( |
|
120 |
_(b'error: %s hook raised an exception: |
|
|
120 | _(b'error: %s hook raised an exception: %s\n') | |
|
121 | 121 | % (hname, stringutil.forcebytestr(exc)) |
|
122 | 122 | ) |
|
123 | 123 | if throw: |
@@ -383,14 +383,13 b' def parsev1commandresponse(' | |||
|
383 | 383 | return respurl, proto, resp |
|
384 | 384 | else: |
|
385 | 385 | raise error.RepoError( |
|
386 |
_(b'unexpected CBOR response from |
|
|
386 | _(b'unexpected CBOR response from server') | |
|
387 | 387 | ) |
|
388 | 388 | |
|
389 | 389 | version_info = tuple([int(n) for n in subtype.split(b'.')]) |
|
390 | 390 | except ValueError: |
|
391 | 391 | raise error.RepoError( |
|
392 |
_(b"'%s' sent a broken Content-Type |
|
|
393 | % (safeurl, proto) | |
|
392 | _(b"'%s' sent a broken Content-Type header (%s)") % (safeurl, proto) | |
|
394 | 393 | ) |
|
395 | 394 | |
|
396 | 395 | # TODO consider switching to a decompression reader that uses |
@@ -685,12 +684,12 b' class httpv2executor(object):' | |||
|
685 | 684 | def callcommand(self, command, args): |
|
686 | 685 | if self._sent: |
|
687 | 686 | raise error.ProgrammingError( |
|
688 |
b'callcommand() cannot be used after |
|
|
687 | b'callcommand() cannot be used after commands are sent' | |
|
689 | 688 | ) |
|
690 | 689 | |
|
691 | 690 | if self._closed: |
|
692 | 691 | raise error.ProgrammingError( |
|
693 |
b'callcommand() cannot be used after |
|
|
692 | b'callcommand() cannot be used after close()' | |
|
694 | 693 | ) |
|
695 | 694 | |
|
696 | 695 | # The service advertises which commands are available. So if we attempt |
@@ -763,7 +762,7 b' class httpv2executor(object):' | |||
|
763 | 762 | |
|
764 | 763 | if len(permissions) > 1: |
|
765 | 764 | raise error.RepoError( |
|
766 |
_(b'cannot make request requiring multiple |
|
|
765 | _(b'cannot make request requiring multiple permissions: %s') | |
|
767 | 766 | % _(b', ').join(sorted(permissions)) |
|
768 | 767 | ) |
|
769 | 768 | |
@@ -1101,7 +1100,7 b' def instance(ui, path, create, intents=N' | |||
|
1101 | 1100 | try: |
|
1102 | 1101 | if path.startswith(b'https:') and not urlmod.has_https: |
|
1103 | 1102 | raise error.Abort( |
|
1104 |
_(b'Python support for SSL and HTTPS |
|
|
1103 | _(b'Python support for SSL and HTTPS is not installed') | |
|
1105 | 1104 | ) |
|
1106 | 1105 | |
|
1107 | 1106 | inst = makepeer(ui, path) |
@@ -296,7 +296,7 b' class KeepAliveHandler(object):' | |||
|
296 | 296 | # a DIFFERENT exception |
|
297 | 297 | if DEBUG: |
|
298 | 298 | DEBUG.error( |
|
299 |
b"unexpected exception - closing |
|
|
299 | b"unexpected exception - closing connection to %s (%d)", | |
|
300 | 300 | host, |
|
301 | 301 | id(h), |
|
302 | 302 | ) |
@@ -223,12 +223,12 b' class localcommandexecutor(object):' | |||
|
223 | 223 | def callcommand(self, command, args): |
|
224 | 224 | if self._sent: |
|
225 | 225 | raise error.ProgrammingError( |
|
226 |
b'callcommand() cannot be used after |
|
|
226 | b'callcommand() cannot be used after sendcommands()' | |
|
227 | 227 | ) |
|
228 | 228 | |
|
229 | 229 | if self._closed: |
|
230 | 230 | raise error.ProgrammingError( |
|
231 |
b'callcommand() cannot be used after |
|
|
231 | b'callcommand() cannot be used after close()' | |
|
232 | 232 | ) |
|
233 | 233 | |
|
234 | 234 | # We don't need to support anything fancy. Just call the named |
@@ -343,9 +343,7 b' class localpeer(repository.peer):' | |||
|
343 | 343 | return self._repo.pushkey(namespace, key, old, new) |
|
344 | 344 | |
|
345 | 345 | def stream_out(self): |
|
346 | raise error.Abort( | |
|
347 | _(b'cannot perform stream clone against local ' b'peer') | |
|
348 | ) | |
|
346 | raise error.Abort(_(b'cannot perform stream clone against local peer')) | |
|
349 | 347 | |
|
350 | 348 | def unbundle(self, bundle, heads, url): |
|
351 | 349 | """apply a bundle on a repo |
@@ -568,7 +566,7 b' def makelocalrepository(baseui, path, in' | |||
|
568 | 566 | |
|
569 | 567 | if not sharedvfs.exists(): |
|
570 | 568 | raise error.RepoError( |
|
571 |
_(b'.hg/sharedpath points to nonexistent |
|
|
569 | _(b'.hg/sharedpath points to nonexistent directory %s') | |
|
572 | 570 | % sharedvfs.base |
|
573 | 571 | ) |
|
574 | 572 | |
@@ -1453,7 +1451,7 b' class localrepository(object):' | |||
|
1453 | 1451 | if not self._dirstatevalidatewarned: |
|
1454 | 1452 | self._dirstatevalidatewarned = True |
|
1455 | 1453 | self.ui.warn( |
|
1456 |
_(b"warning: ignoring unknown |
|
|
1454 | _(b"warning: ignoring unknown working parent %s!\n") | |
|
1457 | 1455 | % short(node) |
|
1458 | 1456 | ) |
|
1459 | 1457 | return nullid |
@@ -2302,7 +2300,7 b' class localrepository(object):' | |||
|
2302 | 2300 | ) % (oldtip, desc, detail) |
|
2303 | 2301 | else: |
|
2304 | 2302 | msg = _( |
|
2305 |
b'repository tip rolled back to revision %d |
|
|
2303 | b'repository tip rolled back to revision %d (undo %s)\n' | |
|
2306 | 2304 | ) % (oldtip, desc) |
|
2307 | 2305 | except IOError: |
|
2308 | 2306 | msg = _(b'rolling back unknown transaction\n') |
@@ -2367,8 +2365,7 b' class localrepository(object):' | |||
|
2367 | 2365 | ) |
|
2368 | 2366 | else: |
|
2369 | 2367 | ui.status( |
|
2370 |
_(b'working directory now based on |
|
|
2371 | % parents | |
|
2368 | _(b'working directory now based on revision %d\n') % parents | |
|
2372 | 2369 | ) |
|
2373 | 2370 | mergemod.mergestate.clean(self, self[b'.'].node()) |
|
2374 | 2371 | |
@@ -3600,7 +3597,7 b' def createrepository(ui, path, createopt' | |||
|
3600 | 3597 | |
|
3601 | 3598 | if not isinstance(unknownopts, dict): |
|
3602 | 3599 | raise error.ProgrammingError( |
|
3603 |
b'filterknowncreateopts() did not return |
|
|
3600 | b'filterknowncreateopts() did not return a dict' | |
|
3604 | 3601 | ) |
|
3605 | 3602 | |
|
3606 | 3603 | if unknownopts: |
@@ -3687,7 +3684,7 b' def poisonrepository(repo):' | |||
|
3687 | 3684 | return object.__getattribute__(self, item) |
|
3688 | 3685 | |
|
3689 | 3686 | raise error.ProgrammingError( |
|
3690 |
b'repo instances should not be used |
|
|
3687 | b'repo instances should not be used after unshare' | |
|
3691 | 3688 | ) |
|
3692 | 3689 | |
|
3693 | 3690 | def close(self): |
@@ -925,8 +925,7 b' def getlinerangerevs(repo, userrevs, opt' | |||
|
925 | 925 | for fname, (fromline, toline) in _parselinerangeopt(repo, opts): |
|
926 | 926 | if fname not in wctx: |
|
927 | 927 | raise error.Abort( |
|
928 |
_(b'cannot follow file not in parent |
|
|
929 | % fname | |
|
928 | _(b'cannot follow file not in parent revision: "%s"') % fname | |
|
930 | 929 | ) |
|
931 | 930 | fctx = wctx.filectx(fname) |
|
932 | 931 | for fctx, linerange in dagop.blockancestors(fctx, fromline, toline): |
@@ -236,8 +236,7 b' def validateconfig(ui):' | |||
|
236 | 236 | else: |
|
237 | 237 | if not procutil.findexe(method): |
|
238 | 238 | raise error.Abort( |
|
239 |
_(b'%r specified as email transport, |
|
|
240 | % method | |
|
239 | _(b'%r specified as email transport, but not in PATH') % method | |
|
241 | 240 | ) |
|
242 | 241 | |
|
243 | 242 |
@@ -778,7 +778,7 b' class treemanifest(object):' | |||
|
778 | 778 | |
|
779 | 779 | def readsubtree(subdir, subm): |
|
780 | 780 | raise AssertionError( |
|
781 |
b'treemanifest constructor only accepts |
|
|
781 | b'treemanifest constructor only accepts flat manifests' | |
|
782 | 782 | ) |
|
783 | 783 | |
|
784 | 784 | self.parse(text, readsubtree) |
@@ -66,7 +66,7 b' def _expandsets(kindpats, ctx=None, list' | |||
|
66 | 66 | if kind == b'set': |
|
67 | 67 | if ctx is None: |
|
68 | 68 | raise error.ProgrammingError( |
|
69 |
b"fileset expression with no |
|
|
69 | b"fileset expression with no context" | |
|
70 | 70 | ) |
|
71 | 71 | matchers.append(ctx.matchfileset(pat, badfn=badfn)) |
|
72 | 72 | |
@@ -546,7 +546,7 b' class predicatematcher(basematcher):' | |||
|
546 | 546 | def normalizerootdir(dir, funcname): |
|
547 | 547 | if dir == b'.': |
|
548 | 548 | util.nouideprecwarn( |
|
549 |
b"match.%s() no longer accepts |
|
|
549 | b"match.%s() no longer accepts '.', use '' instead." % funcname, | |
|
550 | 550 | b'5.1', |
|
551 | 551 | ) |
|
552 | 552 | return b'' |
@@ -83,7 +83,7 b' class diffopts(object):' | |||
|
83 | 83 | self.context = int(self.context) |
|
84 | 84 | except ValueError: |
|
85 | 85 | raise error.Abort( |
|
86 |
_(b'diff context lines count must be |
|
|
86 | _(b'diff context lines count must be an integer, not %r') | |
|
87 | 87 | % pycompat.bytestr(self.context) |
|
88 | 88 | ) |
|
89 | 89 |
@@ -760,7 +760,7 b' def _getcheckunknownconfig(repo, section' | |||
|
760 | 760 | if config not in valid: |
|
761 | 761 | validstr = b', '.join([b"'" + v + b"'" for v in valid]) |
|
762 | 762 | raise error.ConfigError( |
|
763 |
_(b"%s.%s not valid |
|
|
763 | _(b"%s.%s not valid ('%s' is none of %s)") | |
|
764 | 764 | % (section, name, config, validstr) |
|
765 | 765 | ) |
|
766 | 766 | return config |
@@ -1048,7 +1048,7 b' def _checkcollision(repo, wmf, actions):' | |||
|
1048 | 1048 | if fold.startswith(foldprefix) and not f.startswith(unfoldprefix): |
|
1049 | 1049 | # the folded prefix matches but actual casing is different |
|
1050 | 1050 | raise error.Abort( |
|
1051 |
_(b"case-folding collision between |
|
|
1051 | _(b"case-folding collision between %s and directory of %s") | |
|
1052 | 1052 | % (lastfull, f) |
|
1053 | 1053 | ) |
|
1054 | 1054 | foldprefix = fold + b'/' |
@@ -1225,11 +1225,11 b' def _filternarrowactions(narrowmatch, br' | |||
|
1225 | 1225 | b'which is not yet supported' |
|
1226 | 1226 | ) |
|
1227 | 1227 | % f, |
|
1228 |
hint=_(b'merging in the other direction |
|
|
1228 | hint=_(b'merging in the other direction may work'), | |
|
1229 | 1229 | ) |
|
1230 | 1230 | else: |
|
1231 | 1231 | raise error.Abort( |
|
1232 |
_(b'conflict in file \'%s\' is outside |
|
|
1232 | _(b'conflict in file \'%s\' is outside narrow clone') % f | |
|
1233 | 1233 | ) |
|
1234 | 1234 | |
|
1235 | 1235 | |
@@ -1992,7 +1992,7 b' def applyupdates(' | |||
|
1992 | 1992 | if usemergedriver: |
|
1993 | 1993 | if wctx.isinmemory(): |
|
1994 | 1994 | raise error.InMemoryMergeConflictsError( |
|
1995 |
b"in-memory merge does not |
|
|
1995 | b"in-memory merge does not support mergedriver" | |
|
1996 | 1996 | ) |
|
1997 | 1997 | ms.commit() |
|
1998 | 1998 | proceed = driverpreprocess(repo, ms, wctx, labels=labels) |
@@ -2334,7 +2334,7 b' def update(' | |||
|
2334 | 2334 | if not mergeancestor and wc.branch() == p2.branch(): |
|
2335 | 2335 | raise error.Abort( |
|
2336 | 2336 | _(b"nothing to merge"), |
|
2337 |
hint=_(b"use 'hg update' |
|
|
2337 | hint=_(b"use 'hg update' or check 'hg heads'"), | |
|
2338 | 2338 | ) |
|
2339 | 2339 | if not force and (wc.files() or wc.deleted()): |
|
2340 | 2340 | raise error.Abort( |
@@ -15,7 +15,7 b' from . import error' | |||
|
15 | 15 | def checkunresolved(ms): |
|
16 | 16 | if list(ms.unresolved()): |
|
17 | 17 | raise error.Abort( |
|
18 |
_(b"unresolved merge conflicts |
|
|
18 | _(b"unresolved merge conflicts (see 'hg help resolve')") | |
|
19 | 19 | ) |
|
20 | 20 | if ms.mdstate() != b's' or list(ms.driverresolved()): |
|
21 | 21 | raise error.Abort( |
@@ -110,7 +110,7 b' def validatepatterns(pats):' | |||
|
110 | 110 | """ |
|
111 | 111 | if not isinstance(pats, set): |
|
112 | 112 | raise error.ProgrammingError( |
|
113 |
b'narrow patterns should be a set; |
|
|
113 | b'narrow patterns should be a set; got %r' % pats | |
|
114 | 114 | ) |
|
115 | 115 | |
|
116 | 116 | for pat in pats: |
@@ -667,7 +667,7 b' class obsstore(object):' | |||
|
667 | 667 | Return the number of new marker.""" |
|
668 | 668 | if self._readonly: |
|
669 | 669 | raise error.Abort( |
|
670 |
_(b'creating obsolete markers is not enabled on |
|
|
670 | _(b'creating obsolete markers is not enabled on this repo') | |
|
671 | 671 | ) |
|
672 | 672 | known = set() |
|
673 | 673 | getsuccessors = self.successors.get |
@@ -929,7 +929,7 b' filteredmsgtable = {' | |||
|
929 | 929 | b"superseded": _(b"hidden revision '%s' was rewritten as: %s"), |
|
930 | 930 | b"superseded_split": _(b"hidden revision '%s' was split as: %s"), |
|
931 | 931 | b"superseded_split_several": _( |
|
932 |
b"hidden revision '%s' was split as: %s and |
|
|
932 | b"hidden revision '%s' was split as: %s and %d more" | |
|
933 | 933 | ), |
|
934 | 934 | } |
|
935 | 935 |
@@ -163,12 +163,12 b' def buildargsdict(trees, funcname, argsp' | |||
|
163 | 163 | ) |
|
164 | 164 | if kwstart < len(poskeys): |
|
165 | 165 | raise error.ParseError( |
|
166 |
_(b"%(func)s takes at least %(nargs)d positional |
|
|
166 | _(b"%(func)s takes at least %(nargs)d positional arguments") | |
|
167 | 167 | % {b'func': funcname, b'nargs': len(poskeys)} |
|
168 | 168 | ) |
|
169 | 169 | if not varkey and kwstart > len(poskeys) + len(keys): |
|
170 | 170 | raise error.ParseError( |
|
171 |
_(b"%(func)s takes at most %(nargs)d positional |
|
|
171 | _(b"%(func)s takes at most %(nargs)d positional arguments") | |
|
172 | 172 | % {b'func': funcname, b'nargs': len(poskeys) + len(keys)} |
|
173 | 173 | ) |
|
174 | 174 | args = util.sortdict() |
@@ -193,7 +193,7 b' def buildargsdict(trees, funcname, argsp' | |||
|
193 | 193 | d = args |
|
194 | 194 | elif not optkey: |
|
195 | 195 | raise error.ParseError( |
|
196 |
_(b"%(func)s got an unexpected keyword |
|
|
196 | _(b"%(func)s got an unexpected keyword argument '%(key)s'") | |
|
197 | 197 | % {b'func': funcname, b'key': k} |
|
198 | 198 | ) |
|
199 | 199 | else: |
@@ -713,7 +713,7 b' class basealiasrules(object):' | |||
|
713 | 713 | raise error.Abort(a.error) |
|
714 | 714 | if a in expanding: |
|
715 | 715 | raise error.ParseError( |
|
716 |
_(b'infinite expansion of %(section)s |
|
|
716 | _(b'infinite expansion of %(section)s "%(name)s" detected') | |
|
717 | 717 | % {b'section': cls._section, b'name': a.name} |
|
718 | 718 | ) |
|
719 | 719 | # get cacheable replacement tree by expanding aliases recursively |
@@ -51,7 +51,7 b' stringio = util.stringio' | |||
|
51 | 51 | gitre = re.compile(br'diff --git a/(.*) b/(.*)') |
|
52 | 52 | tabsplitter = re.compile(br'(\t+|[^\t]+)') |
|
53 | 53 | wordsplitter = re.compile( |
|
54 |
br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+| |
|
|
54 | br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])' | |
|
55 | 55 | ) |
|
56 | 56 | |
|
57 | 57 | PatchError = error.PatchError |
@@ -805,7 +805,7 b' class patchfile(object):' | |||
|
805 | 805 | if self.exists and self.create: |
|
806 | 806 | if self.copysource: |
|
807 | 807 | self.ui.warn( |
|
808 |
_(b"cannot create %s: destination already |
|
|
808 | _(b"cannot create %s: destination already exists\n") | |
|
809 | 809 | % self.fname |
|
810 | 810 | ) |
|
811 | 811 | else: |
@@ -3191,7 +3191,7 b' def diffstat(lines, width=80):' | |||
|
3191 | 3191 | |
|
3192 | 3192 | if stats: |
|
3193 | 3193 | output.append( |
|
3194 |
_(b' %d files changed, %d insertions(+), |
|
|
3194 | _(b' %d files changed, %d insertions(+), %d deletions(-)\n') | |
|
3195 | 3195 | % (len(stats), totaladds, totalremoves) |
|
3196 | 3196 | ) |
|
3197 | 3197 |
@@ -43,9 +43,7 b' def lsprofile(ui, fp):' | |||
|
43 | 43 | climit = ui.configint(b'profiling', b'nested') |
|
44 | 44 | |
|
45 | 45 | if format not in [b'text', b'kcachegrind']: |
|
46 | ui.warn( | |
|
47 | _(b"unrecognized profiling format '%s'" b" - Ignored\n") % format | |
|
48 | ) | |
|
46 | ui.warn(_(b"unrecognized profiling format '%s' - Ignored\n") % format) | |
|
49 | 47 | format = b'text' |
|
50 | 48 | |
|
51 | 49 | try: |
@@ -361,7 +361,7 b' else:' | |||
|
361 | 361 | setattr = setattr |
|
362 | 362 | |
|
363 | 363 | # this can't be parsed on Python 3 |
|
364 |
exec(b'def raisewithtb(exc, tb):\n |
|
|
364 | exec(b'def raisewithtb(exc, tb):\n raise exc, None, tb\n') | |
|
365 | 365 | |
|
366 | 366 | def fsencode(filename): |
|
367 | 367 | """ |
@@ -359,7 +359,7 b' def safestriproots(ui, repo, nodes):' | |||
|
359 | 359 | if notstrip: |
|
360 | 360 | nodestr = b', '.join(sorted(short(repo[n].node()) for n in notstrip)) |
|
361 | 361 | ui.warn( |
|
362 |
_(b'warning: orphaned descendants detected, |
|
|
362 | _(b'warning: orphaned descendants detected, not stripping %s\n') | |
|
363 | 363 | % nodestr |
|
364 | 364 | ) |
|
365 | 365 | return [c.node() for c in repo.set(b'roots(%ld)', tostrip)] |
@@ -70,7 +70,7 b' class repoloader(object):' | |||
|
70 | 70 | """ |
|
71 | 71 | if self._thread and self._thread.is_alive(): |
|
72 | 72 | raise error.ProgrammingError( |
|
73 |
b'cannot obtain cached repo while |
|
|
73 | b'cannot obtain cached repo while loader is active' | |
|
74 | 74 | ) |
|
75 | 75 | return self._cache.peek(path, None) |
|
76 | 76 |
@@ -244,7 +244,7 b' class revlogoldio(object):' | |||
|
244 | 244 | def packentry(self, entry, node, version, rev): |
|
245 | 245 | if gettype(entry[0]): |
|
246 | 246 | raise error.RevlogError( |
|
247 |
_(b'index entry flags need revlog |
|
|
247 | _(b'index entry flags need revlog version 1') | |
|
248 | 248 | ) |
|
249 | 249 | e2 = ( |
|
250 | 250 | getoffset(entry[0]), |
@@ -451,12 +451,12 b' class revlog(object):' | |||
|
451 | 451 | |
|
452 | 452 | if self._chunkcachesize <= 0: |
|
453 | 453 | raise error.RevlogError( |
|
454 |
_(b'revlog chunk cache size %r is not |
|
|
454 | _(b'revlog chunk cache size %r is not greater than 0') | |
|
455 | 455 | % self._chunkcachesize |
|
456 | 456 | ) |
|
457 | 457 | elif self._chunkcachesize & (self._chunkcachesize - 1): |
|
458 | 458 | raise error.RevlogError( |
|
459 |
_(b'revlog chunk cache size %r is not a |
|
|
459 | _(b'revlog chunk cache size %r is not a power of 2') | |
|
460 | 460 | % self._chunkcachesize |
|
461 | 461 | ) |
|
462 | 462 | |
@@ -492,7 +492,7 b' class revlog(object):' | |||
|
492 | 492 | if fmt == REVLOGV0: |
|
493 | 493 | if flags: |
|
494 | 494 | raise error.RevlogError( |
|
495 |
_(b'unknown flags (%#04x) in version %d |
|
|
495 | _(b'unknown flags (%#04x) in version %d revlog %s') | |
|
496 | 496 | % (flags >> 16, fmt, self.indexfile) |
|
497 | 497 | ) |
|
498 | 498 | |
@@ -502,7 +502,7 b' class revlog(object):' | |||
|
502 | 502 | elif fmt == REVLOGV1: |
|
503 | 503 | if flags & ~REVLOGV1_FLAGS: |
|
504 | 504 | raise error.RevlogError( |
|
505 |
_(b'unknown flags (%#04x) in version %d |
|
|
505 | _(b'unknown flags (%#04x) in version %d revlog %s') | |
|
506 | 506 | % (flags >> 16, fmt, self.indexfile) |
|
507 | 507 | ) |
|
508 | 508 | |
@@ -512,7 +512,7 b' class revlog(object):' | |||
|
512 | 512 | elif fmt == REVLOGV2: |
|
513 | 513 | if flags & ~REVLOGV2_FLAGS: |
|
514 | 514 | raise error.RevlogError( |
|
515 |
_(b'unknown flags (%#04x) in version %d |
|
|
515 | _(b'unknown flags (%#04x) in version %d revlog %s') | |
|
516 | 516 | % (flags >> 16, fmt, self.indexfile) |
|
517 | 517 | ) |
|
518 | 518 | |
@@ -2707,7 +2707,7 b' class revlog(object):' | |||
|
2707 | 2707 | |
|
2708 | 2708 | if len(tombstone) > self.rawsize(censorrev): |
|
2709 | 2709 | raise error.Abort( |
|
2710 |
_(b'censor tombstone must be no longer than |
|
|
2710 | _(b'censor tombstone must be no longer than censored data') | |
|
2711 | 2711 | ) |
|
2712 | 2712 | |
|
2713 | 2713 | # Rewriting the revlog in place is hard. Our strategy for censoring is |
@@ -980,7 +980,7 b' def expectsize(repo, subset, x, order):' | |||
|
980 | 980 | raise error.ParseError(_(b'invalid set of arguments')) |
|
981 | 981 | minsize, maxsize = getintrange( |
|
982 | 982 | args[b'size'], |
|
983 |
_(b'expectsize requires a size range |
|
|
983 | _(b'expectsize requires a size range or a positive integer'), | |
|
984 | 984 | _(b'size range bounds must be integers'), |
|
985 | 985 | minsize, |
|
986 | 986 | maxsize, |
@@ -989,11 +989,13 b' def expectsize(repo, subset, x, order):' | |||
|
989 | 989 | raise error.ParseError(_(b'negative size')) |
|
990 | 990 | rev = getset(repo, fullreposet(repo), args[b'set'], order=order) |
|
991 | 991 | if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize): |
|
992 | err = _( | |
|
993 | b'revset size mismatch.' b' expected between %d and %d, got %d' | |
|
994 |
|
|
|
992 | err = _(b'revset size mismatch. expected between %d and %d, got %d') % ( | |
|
993 | minsize, | |
|
994 | maxsize, | |
|
995 | len(rev), | |
|
996 | ) | |
|
995 | 997 | elif minsize == maxsize and len(rev) != minsize: |
|
996 |
err = _(b'revset size mismatch. |
|
|
998 | err = _(b'revset size mismatch. expected %d, got %d') % ( | |
|
997 | 999 | minsize, |
|
998 | 1000 | len(rev), |
|
999 | 1001 | ) |
@@ -1043,14 +1045,14 b' def extra(repo, subset, x):' | |||
|
1043 | 1045 | raise error.ParseError(_(b'extra takes at least 1 argument')) |
|
1044 | 1046 | # i18n: "extra" is a keyword |
|
1045 | 1047 | label = getstring( |
|
1046 |
args[b'label'], _(b'first argument to extra must be |
|
|
1048 | args[b'label'], _(b'first argument to extra must be a string') | |
|
1047 | 1049 | ) |
|
1048 | 1050 | value = None |
|
1049 | 1051 | |
|
1050 | 1052 | if b'value' in args: |
|
1051 | 1053 | # i18n: "extra" is a keyword |
|
1052 | 1054 | value = getstring( |
|
1053 |
args[b'value'], _(b'second argument to extra must be |
|
|
1055 | args[b'value'], _(b'second argument to extra must be a string') | |
|
1054 | 1056 | ) |
|
1055 | 1057 | kind, value, matcher = stringutil.stringmatcher(value) |
|
1056 | 1058 | |
@@ -1314,7 +1316,7 b' def _matchfiles(repo, subset, x):' | |||
|
1314 | 1316 | elif prefix == b'r:': |
|
1315 | 1317 | if rev is not None: |
|
1316 | 1318 | raise error.ParseError( |
|
1317 |
b'_matchfiles expected at most one |
|
|
1319 | b'_matchfiles expected at most one revision' | |
|
1318 | 1320 | ) |
|
1319 | 1321 | if value == b'': # empty means working directory |
|
1320 | 1322 | rev = node.wdirrev |
@@ -1323,7 +1325,7 b' def _matchfiles(repo, subset, x):' | |||
|
1323 | 1325 | elif prefix == b'd:': |
|
1324 | 1326 | if default is not None: |
|
1325 | 1327 | raise error.ParseError( |
|
1326 |
b'_matchfiles expected at most one |
|
|
1328 | b'_matchfiles expected at most one default mode' | |
|
1327 | 1329 | ) |
|
1328 | 1330 | default = value |
|
1329 | 1331 | else: |
@@ -2127,7 +2129,7 b' def matching(repo, subset, x):' | |||
|
2127 | 2129 | fieldlist = getstring( |
|
2128 | 2130 | l[1], |
|
2129 | 2131 | # i18n: "matching" is a keyword |
|
2130 |
_(b"matching requires a string |
|
|
2132 | _(b"matching requires a string as its second argument"), | |
|
2131 | 2133 | ).split() |
|
2132 | 2134 | |
|
2133 | 2135 | # Make sure that there are no repeated fields, |
@@ -2284,7 +2286,7 b' def _getsortargs(x):' | |||
|
2284 | 2286 | if len(keyflags) > 1 and any(k == b'topo' for k, reverse in keyflags): |
|
2285 | 2287 | # i18n: "topo" is a keyword |
|
2286 | 2288 | raise error.ParseError( |
|
2287 |
_(b'topo sort order cannot be combined |
|
|
2289 | _(b'topo sort order cannot be combined with other sort keys') | |
|
2288 | 2290 | ) |
|
2289 | 2291 | |
|
2290 | 2292 | opts = {} |
@@ -456,7 +456,7 b' def _nothingtoshelvemessaging(ui, repo, ' | |||
|
456 | 456 | stat = repo.status(match=scmutil.match(repo[None], pats, opts)) |
|
457 | 457 | if stat.deleted: |
|
458 | 458 | ui.status( |
|
459 |
_(b"nothing changed (%d missing files, see |
|
|
459 | _(b"nothing changed (%d missing files, see 'hg status')\n") | |
|
460 | 460 | % len(stat.deleted) |
|
461 | 461 | ) |
|
462 | 462 | else: |
@@ -707,7 +707,7 b' def checkparents(repo, state):' | |||
|
707 | 707 | """check parent while resuming an unshelve""" |
|
708 | 708 | if state.parents != repo.dirstate.parents(): |
|
709 | 709 | raise error.Abort( |
|
710 |
_(b'working directory parents do not match unshelve |
|
|
710 | _(b'working directory parents do not match unshelve state') | |
|
711 | 711 | ) |
|
712 | 712 | |
|
713 | 713 |
@@ -121,7 +121,7 b' def patternsforrev(repo, rev):' | |||
|
121 | 121 | |
|
122 | 122 | if rev is None: |
|
123 | 123 | raise error.Abort( |
|
124 |
_(b'cannot parse sparse patterns from working |
|
|
124 | _(b'cannot parse sparse patterns from working directory') | |
|
125 | 125 | ) |
|
126 | 126 | |
|
127 | 127 | includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse') |
@@ -483,7 +483,7 b' def refreshwdir(repo, origstatus, origsp' | |||
|
483 | 483 | |
|
484 | 484 | if abort: |
|
485 | 485 | raise error.Abort( |
|
486 |
_(b'could not update sparseness due to pending |
|
|
486 | _(b'could not update sparseness due to pending changes') | |
|
487 | 487 | ) |
|
488 | 488 | |
|
489 | 489 | # Calculate actions |
@@ -210,7 +210,7 b' def _hostsettings(ui, hostname):' | |||
|
210 | 210 | if not (fingerprint.startswith((b'sha1:', b'sha256:', b'sha512:'))): |
|
211 | 211 | raise error.Abort( |
|
212 | 212 | _(b'invalid fingerprint for %s: %s') % (bhostname, fingerprint), |
|
213 |
hint=_(b'must begin with "sha1:", "sha256:", |
|
|
213 | hint=_(b'must begin with "sha1:", "sha256:", or "sha512:"'), | |
|
214 | 214 | ) |
|
215 | 215 | |
|
216 | 216 | alg, fingerprint = fingerprint.split(b':', 1) |
@@ -328,7 +328,7 b' def protocolsettings(protocol):' | |||
|
328 | 328 | if supportedprotocols == {b'tls1.0'}: |
|
329 | 329 | if protocol != b'tls1.0': |
|
330 | 330 | raise error.Abort( |
|
331 |
_(b'current Python does not support protocol |
|
|
331 | _(b'current Python does not support protocol setting %s') | |
|
332 | 332 | % protocol, |
|
333 | 333 | hint=_( |
|
334 | 334 | b'upgrade Python or disable setting since ' |
@@ -616,7 +616,7 b' def wrapserversocket(' | |||
|
616 | 616 | for f in (certfile, keyfile, cafile): |
|
617 | 617 | if f and not os.path.exists(f): |
|
618 | 618 | raise error.Abort( |
|
619 |
_(b'referenced certificate file (%s) does not |
|
|
619 | _(b'referenced certificate file (%s) does not exist') % f | |
|
620 | 620 | ) |
|
621 | 621 | |
|
622 | 622 | protocol, options, _protocolui = protocolsettings(b'tls1.0') |
@@ -928,7 +928,7 b' def validatesocket(sock):' | |||
|
928 | 928 | |
|
929 | 929 | if not peercert: |
|
930 | 930 | raise error.Abort( |
|
931 |
_(b'%s certificate error: |
|
|
931 | _(b'%s certificate error: no certificate received') % host | |
|
932 | 932 | ) |
|
933 | 933 | |
|
934 | 934 | if settings[b'disablecertverification']: |
@@ -990,7 +990,7 b' def validatesocket(sock):' | |||
|
990 | 990 | section = b'hostsecurity' |
|
991 | 991 | nice = b'%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash])) |
|
992 | 992 | raise error.Abort( |
|
993 |
_(b'certificate for %s has unexpected |
|
|
993 | _(b'certificate for %s has unexpected fingerprint %s') | |
|
994 | 994 | % (host, nice), |
|
995 | 995 | hint=_(b'check %s configuration') % section, |
|
996 | 996 | ) |
@@ -60,7 +60,7 b' class cmdstate(object):' | |||
|
60 | 60 | """ |
|
61 | 61 | if not isinstance(version, int): |
|
62 | 62 | raise error.ProgrammingError( |
|
63 |
b"version of state file should be |
|
|
63 | b"version of state file should be an integer" | |
|
64 | 64 | ) |
|
65 | 65 | |
|
66 | 66 | with self._repo.vfs(self.fname, b'wb', atomictemp=True) as fp: |
@@ -76,7 +76,7 b' class cmdstate(object):' | |||
|
76 | 76 | int(fp.readline()) |
|
77 | 77 | except ValueError: |
|
78 | 78 | raise error.CorruptedState( |
|
79 |
b"unknown version of state file |
|
|
79 | b"unknown version of state file found" | |
|
80 | 80 | ) |
|
81 | 81 | |
|
82 | 82 | return cborutil.decodeall(fp.read())[0] |
@@ -468,14 +468,14 b' def applybundlev1(repo, fp):' | |||
|
468 | 468 | """ |
|
469 | 469 | if len(repo): |
|
470 | 470 | raise error.Abort( |
|
471 |
_(b'cannot apply stream clone bundle on non-empty |
|
|
471 | _(b'cannot apply stream clone bundle on non-empty repo') | |
|
472 | 472 | ) |
|
473 | 473 | |
|
474 | 474 | filecount, bytecount, requirements = readbundle1header(fp) |
|
475 | 475 | missingreqs = requirements - repo.supportedformats |
|
476 | 476 | if missingreqs: |
|
477 | 477 | raise error.Abort( |
|
478 |
_(b'unable to apply stream clone: |
|
|
478 | _(b'unable to apply stream clone: unsupported format: %s') | |
|
479 | 479 | % b', '.join(sorted(missingreqs)) |
|
480 | 480 | ) |
|
481 | 481 | |
@@ -715,7 +715,7 b' def applybundlev2(repo, fp, filecount, f' | |||
|
715 | 715 | missingreqs = [r for r in requirements if r not in repo.supported] |
|
716 | 716 | if missingreqs: |
|
717 | 717 | raise error.Abort( |
|
718 |
_(b'unable to apply stream clone: |
|
|
718 | _(b'unable to apply stream clone: unsupported format: %s') | |
|
719 | 719 | % b', '.join(sorted(missingreqs)) |
|
720 | 720 | ) |
|
721 | 721 |
@@ -1232,7 +1232,7 b' class svnsubrepo(abstractsubrepo):' | |||
|
1232 | 1232 | def remove(self): |
|
1233 | 1233 | if self.dirty(): |
|
1234 | 1234 | self.ui.warn( |
|
1235 |
_(b'not removing repo %s because |
|
|
1235 | _(b'not removing repo %s because it has changes.\n') | |
|
1236 | 1236 | % self._path |
|
1237 | 1237 | ) |
|
1238 | 1238 | return |
@@ -1572,7 +1572,7 b' class gitsubrepo(abstractsubrepo):' | |||
|
1572 | 1572 | self._gitcommand([b'fetch']) |
|
1573 | 1573 | if not self._githavelocally(revision): |
|
1574 | 1574 | raise error.Abort( |
|
1575 |
_(b'revision %s does not exist in subrepository |
|
|
1575 | _(b'revision %s does not exist in subrepository "%s"\n') | |
|
1576 | 1576 | % (revision, self._relpath) |
|
1577 | 1577 | ) |
|
1578 | 1578 | |
@@ -1630,11 +1630,11 b' class gitsubrepo(abstractsubrepo):' | |||
|
1630 | 1630 | def rawcheckout(): |
|
1631 | 1631 | # no branch to checkout, check it out with no branch |
|
1632 | 1632 | self.ui.warn( |
|
1633 |
_(b'checking out detached HEAD in |
|
|
1633 | _(b'checking out detached HEAD in subrepository "%s"\n') | |
|
1634 | 1634 | % self._relpath |
|
1635 | 1635 | ) |
|
1636 | 1636 | self.ui.warn( |
|
1637 |
_(b'check out a git branch if you intend |
|
|
1637 | _(b'check out a git branch if you intend to make changes\n') | |
|
1638 | 1638 | ) |
|
1639 | 1639 | checkout([b'-q', revision]) |
|
1640 | 1640 | |
@@ -1822,7 +1822,7 b' class gitsubrepo(abstractsubrepo):' | |||
|
1822 | 1822 | return |
|
1823 | 1823 | if self.dirty(): |
|
1824 | 1824 | self.ui.warn( |
|
1825 |
_(b'not removing repo %s because |
|
|
1825 | _(b'not removing repo %s because it has changes.\n') | |
|
1826 | 1826 | % self._relpath |
|
1827 | 1827 | ) |
|
1828 | 1828 | return |
@@ -593,7 +593,7 b' def _tag(' | |||
|
593 | 593 | repo.hook(b'pretag', throw=True, node=hex(node), tag=name, local=local) |
|
594 | 594 | if name in branches: |
|
595 | 595 | repo.ui.warn( |
|
596 |
_(b"warning: tag %s conflicts with existing |
|
|
596 | _(b"warning: tag %s conflicts with existing branch name\n") | |
|
597 | 597 | % name |
|
598 | 598 | ) |
|
599 | 599 |
@@ -409,7 +409,7 b' class compressionengine(formatvariant):' | |||
|
409 | 409 | ) |
|
410 | 410 | |
|
411 | 411 | upgrademessage = _( |
|
412 |
b'revlog content will be recompressed with the new |
|
|
412 | b'revlog content will be recompressed with the new algorithm.' | |
|
413 | 413 | ) |
|
414 | 414 | |
|
415 | 415 | @classmethod |
@@ -1106,7 +1106,7 b' def upgraderepo(' | |||
|
1106 | 1106 | missingreqs = requiredsourcerequirements(repo) - repo.requirements |
|
1107 | 1107 | if missingreqs: |
|
1108 | 1108 | raise error.Abort( |
|
1109 |
_(b'cannot upgrade repository; requirement |
|
|
1109 | _(b'cannot upgrade repository; requirement missing: %s') | |
|
1110 | 1110 | % _(b', ').join(sorted(missingreqs)) |
|
1111 | 1111 | ) |
|
1112 | 1112 | |
@@ -1173,7 +1173,7 b' def upgraderepo(' | |||
|
1173 | 1173 | raise error.Abort( |
|
1174 | 1174 | _(b'unknown optimization action requested: %s') |
|
1175 | 1175 | % b', '.join(sorted(optimize)), |
|
1176 |
hint=_(b'run without arguments to see valid |
|
|
1176 | hint=_(b'run without arguments to see valid optimizations'), | |
|
1177 | 1177 | ) |
|
1178 | 1178 | |
|
1179 | 1179 | deficiencies = finddeficiencies(repo) |
@@ -2034,12 +2034,12 b' def checkwinfilename(path):' | |||
|
2034 | 2034 | ) |
|
2035 | 2035 | if ord(c) <= 31: |
|
2036 | 2036 | return _( |
|
2037 |
b"filename contains '%s', which is invalid |
|
|
2037 | b"filename contains '%s', which is invalid on Windows" | |
|
2038 | 2038 | ) % stringutil.escapestr(c) |
|
2039 | 2039 | base = n.split(b'.')[0] |
|
2040 | 2040 | if base and base.lower() in _winreservednames: |
|
2041 | 2041 | return ( |
|
2042 |
_(b"filename contains '%s', which is reserved |
|
|
2042 | _(b"filename contains '%s', which is reserved on Windows") | |
|
2043 | 2043 | % base |
|
2044 | 2044 | ) |
|
2045 | 2045 | t = n[-1:] |
@@ -3506,7 +3506,7 b' class dirs(object):' | |||
|
3506 | 3506 | addpath(f) |
|
3507 | 3507 | elif skip is not None: |
|
3508 | 3508 | raise error.ProgrammingError( |
|
3509 |
b"skip character is only supported |
|
|
3509 | b"skip character is only supported with a dict source" | |
|
3510 | 3510 | ) |
|
3511 | 3511 | else: |
|
3512 | 3512 | for f in map: |
@@ -3583,7 +3583,7 b' def readexactly(stream, n):' | |||
|
3583 | 3583 | s = stream.read(n) |
|
3584 | 3584 | if len(s) < n: |
|
3585 | 3585 | raise error.Abort( |
|
3586 |
_(b"stream ended unexpectedly |
|
|
3586 | _(b"stream ended unexpectedly (got %d bytes, expected %d)") | |
|
3587 | 3587 | % (len(s), n) |
|
3588 | 3588 | ) |
|
3589 | 3589 | return s |
@@ -404,7 +404,7 b' def decodeitem(b, offset=0):' | |||
|
404 | 404 | |
|
405 | 405 | if special != SPECIAL_START_ARRAY: |
|
406 | 406 | raise CBORDecodeError( |
|
407 |
b'expected array after finite set |
|
|
407 | b'expected array after finite set semantic tag' | |
|
408 | 408 | ) |
|
409 | 409 | |
|
410 | 410 | return True, size, readcount + readcount2 + 1, SPECIAL_START_SET |
@@ -746,7 +746,7 b' class sansiodecoder(object):' | |||
|
746 | 746 | SPECIAL_START_SET, |
|
747 | 747 | ): |
|
748 | 748 | raise CBORDecodeError( |
|
749 |
b'collections not supported as map |
|
|
749 | b'collections not supported as map keys' | |
|
750 | 750 | ) |
|
751 | 751 | |
|
752 | 752 | # We do not allow special values to be used as map keys. |
@@ -841,7 +841,7 b' class sansiodecoder(object):' | |||
|
841 | 841 | SPECIAL_START_SET, |
|
842 | 842 | ): |
|
843 | 843 | raise CBORDecodeError( |
|
844 |
b'collections not allowed as set |
|
|
844 | b'collections not allowed as set values' | |
|
845 | 845 | ) |
|
846 | 846 | |
|
847 | 847 | # We don't allow non-trivial types to exist as set values. |
@@ -685,7 +685,7 b' class backgroundfilecloser(object):' | |||
|
685 | 685 | """Schedule a file for closing.""" |
|
686 | 686 | if not self._entered: |
|
687 | 687 | raise error.Abort( |
|
688 |
_(b'can only call close() when context manager |
|
|
688 | _(b'can only call close() when context manager active') | |
|
689 | 689 | ) |
|
690 | 690 | |
|
691 | 691 | # If a background thread encountered an exception, raise now so we fail |
@@ -711,7 +711,7 b' class identitydecoder(object):' | |||
|
711 | 711 | def __init__(self, ui, extraobjs): |
|
712 | 712 | if extraobjs: |
|
713 | 713 | raise error.Abort( |
|
714 |
_(b'identity decoder received unexpected |
|
|
714 | _(b'identity decoder received unexpected additional values') | |
|
715 | 715 | ) |
|
716 | 716 | |
|
717 | 717 | def decode(self, data): |
@@ -745,7 +745,7 b' class zlibdecoder(object):' | |||
|
745 | 745 | |
|
746 | 746 | if extraobjs: |
|
747 | 747 | raise error.Abort( |
|
748 |
_(b'zlib decoder received unexpected |
|
|
748 | _(b'zlib decoder received unexpected additional values') | |
|
749 | 749 | ) |
|
750 | 750 | |
|
751 | 751 | self._decompressor = zlib.decompressobj() |
@@ -802,7 +802,7 b' class zstd8mbdecoder(zstdbasedecoder):' | |||
|
802 | 802 | def __init__(self, ui, extraobjs): |
|
803 | 803 | if extraobjs: |
|
804 | 804 | raise error.Abort( |
|
805 |
_(b'zstd8mb decoder received unexpected |
|
|
805 | _(b'zstd8mb decoder received unexpected additional values') | |
|
806 | 806 | ) |
|
807 | 807 | |
|
808 | 808 | super(zstd8mbdecoder, self).__init__(maxwindowsize=8 * 1048576) |
@@ -1116,7 +1116,7 b' class serverreactor(object):' | |||
|
1116 | 1116 | # TODO handle decoding frames |
|
1117 | 1117 | self._state = b'errored' |
|
1118 | 1118 | raise error.ProgrammingError( |
|
1119 |
b'support for decoding stream payloads |
|
|
1119 | b'support for decoding stream payloads not yet implemented' | |
|
1120 | 1120 | ) |
|
1121 | 1121 | |
|
1122 | 1122 | if frame.streamflags & STREAM_FLAG_END_STREAM: |
@@ -1361,7 +1361,7 b' class serverreactor(object):' | |||
|
1361 | 1361 | if not entry[b'requestdone']: |
|
1362 | 1362 | self._state = b'errored' |
|
1363 | 1363 | raise error.ProgrammingError( |
|
1364 |
b'should not be called without |
|
|
1364 | b'should not be called without requestdone set' | |
|
1365 | 1365 | ) |
|
1366 | 1366 | |
|
1367 | 1367 | del self._receivingcommands[requestid] |
@@ -1664,9 +1664,7 b' class serverreactor(object):' | |||
|
1664 | 1664 | return self._makeruncommandresult(frame.requestid) |
|
1665 | 1665 | else: |
|
1666 | 1666 | self._state = b'errored' |
|
1667 | return self._makeerrorresult( | |
|
1668 | _(b'command data frame without ' b'flags') | |
|
1669 | ) | |
|
1667 | return self._makeerrorresult(_(b'command data frame without flags')) | |
|
1670 | 1668 | |
|
1671 | 1669 | def _onframeerrored(self, frame): |
|
1672 | 1670 | return self._makeerrorresult(_(b'server already errored')) |
@@ -1796,7 +1794,7 b' class clientreactor(object):' | |||
|
1796 | 1794 | else: |
|
1797 | 1795 | if not self._cansend: |
|
1798 | 1796 | raise error.ProgrammingError( |
|
1799 |
b'sends cannot be performed on |
|
|
1797 | b'sends cannot be performed on this instance' | |
|
1800 | 1798 | ) |
|
1801 | 1799 | |
|
1802 | 1800 | if not self._hasmultiplesend: |
@@ -1824,7 +1822,7 b' class clientreactor(object):' | |||
|
1824 | 1822 | |
|
1825 | 1823 | if not self._cansend: |
|
1826 | 1824 | raise error.ProgrammingError( |
|
1827 |
b'sends cannot be performed on this |
|
|
1825 | b'sends cannot be performed on this instance' | |
|
1828 | 1826 | ) |
|
1829 | 1827 | |
|
1830 | 1828 | # If the instance only allows sending once, mark that we have fired |
@@ -679,7 +679,7 b' def _runsshserver(ui, repo, fin, fout, e' | |||
|
679 | 679 | _sshv1respondooberror( |
|
680 | 680 | fout, |
|
681 | 681 | ui.ferr, |
|
682 |
b'cannot upgrade protocols multiple |
|
|
682 | b'cannot upgrade protocols multiple times', | |
|
683 | 683 | ) |
|
684 | 684 | state = b'shutdown' |
|
685 | 685 | continue |
@@ -787,7 +787,7 b' def _runsshserver(ui, repo, fin, fout, e' | |||
|
787 | 787 | _sshv1respondooberror( |
|
788 | 788 | fout, |
|
789 | 789 | ui.ferr, |
|
790 |
b'malformed handshake protocol: |
|
|
790 | b'malformed handshake protocol: missing %s' % line, | |
|
791 | 791 | ) |
|
792 | 792 | ok = False |
|
793 | 793 | state = b'shutdown' |
@@ -143,12 +143,12 b' class peerexecutor(object):' | |||
|
143 | 143 | def callcommand(self, command, args): |
|
144 | 144 | if self._sent: |
|
145 | 145 | raise error.ProgrammingError( |
|
146 |
b'callcommand() cannot be used |
|
|
146 | b'callcommand() cannot be used after commands are sent' | |
|
147 | 147 | ) |
|
148 | 148 | |
|
149 | 149 | if self._closed: |
|
150 | 150 | raise error.ProgrammingError( |
|
151 |
b'callcommand() cannot be used |
|
|
151 | b'callcommand() cannot be used after close()' | |
|
152 | 152 | ) |
|
153 | 153 | |
|
154 | 154 | # Commands are dispatched through methods on the peer. |
@@ -41,7 +41,7 b' urlreq = util.urlreq' | |||
|
41 | 41 | |
|
42 | 42 | bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required') |
|
43 | 43 | bundle2requiredhint = _( |
|
44 |
b'see https://www.mercurial-scm.org/wiki/ |
|
|
44 | b'see https://www.mercurial-scm.org/wiki/IncompatibleClient' | |
|
45 | 45 | ) |
|
46 | 46 | bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint) |
|
47 | 47 | |
@@ -165,13 +165,13 b' def wireprotocommand(name, args=None, pe' | |||
|
165 | 165 | |
|
166 | 166 | if not isinstance(args, bytes): |
|
167 | 167 | raise error.ProgrammingError( |
|
168 |
b'arguments for version 1 commands |
|
|
168 | b'arguments for version 1 commands must be declared as bytes' | |
|
169 | 169 | ) |
|
170 | 170 | |
|
171 | 171 | def register(func): |
|
172 | 172 | if name in commands: |
|
173 | 173 | raise error.ProgrammingError( |
|
174 |
b'%s command already registered |
|
|
174 | b'%s command already registered for version 1' % name | |
|
175 | 175 | ) |
|
176 | 176 | commands[name] = wireprototypes.commandentry( |
|
177 | 177 | func, args=args, transports=transports, permission=permission |
@@ -140,7 +140,7 b' def handlehttpv2request(rctx, req, res, ' | |||
|
140 | 140 | # since client does Accept it. |
|
141 | 141 | res.headers[b'Content-Type'] = b'text/plain' |
|
142 | 142 | res.setbodybytes( |
|
143 |
_(b'client MUST send Content-Type header with |
|
|
143 | _(b'client MUST send Content-Type header with value: %s\n') | |
|
144 | 144 | % FRAMINGTYPE |
|
145 | 145 | ) |
|
146 | 146 | return |
@@ -324,7 +324,7 b' def _httpv2runcommand(' | |||
|
324 | 324 | res.status = b'403 Forbidden' |
|
325 | 325 | res.headers[b'Content-Type'] = b'text/plain' |
|
326 | 326 | res.setbodybytes( |
|
327 |
_(b'insufficient permissions to execute |
|
|
327 | _(b'insufficient permissions to execute command: %s') | |
|
328 | 328 | % command[b'command'] |
|
329 | 329 | ) |
|
330 | 330 | return True |
@@ -340,7 +340,7 b' def _httpv2runcommand(' | |||
|
340 | 340 | res.status = b'200 OK' |
|
341 | 341 | res.headers[b'Content-Type'] = b'text/plain' |
|
342 | 342 | res.setbodybytes( |
|
343 |
_(b'multiple commands cannot be issued to this |
|
|
343 | _(b'multiple commands cannot be issued to this URL') | |
|
344 | 344 | ) |
|
345 | 345 | return True |
|
346 | 346 | |
@@ -725,13 +725,13 b' def wireprotocommand(' | |||
|
725 | 725 | |
|
726 | 726 | if not isinstance(args, dict): |
|
727 | 727 | raise error.ProgrammingError( |
|
728 |
b'arguments for version 2 commands |
|
|
728 | b'arguments for version 2 commands must be declared as dicts' | |
|
729 | 729 | ) |
|
730 | 730 | |
|
731 | 731 | for arg, meta in args.items(): |
|
732 | 732 | if arg == b'*': |
|
733 | 733 | raise error.ProgrammingError( |
|
734 |
b'* argument name not allowed on |
|
|
734 | b'* argument name not allowed on version 2 commands' | |
|
735 | 735 | ) |
|
736 | 736 | |
|
737 | 737 | if not isinstance(meta, dict): |
@@ -773,7 +773,7 b' def wireprotocommand(' | |||
|
773 | 773 | def register(func): |
|
774 | 774 | if name in COMMANDS: |
|
775 | 775 | raise error.ProgrammingError( |
|
776 |
b'%s command already registered |
|
|
776 | b'%s command already registered for version 2' % name | |
|
777 | 777 | ) |
|
778 | 778 | |
|
779 | 779 | COMMANDS[name] = wireprototypes.commandentry( |
@@ -890,7 +890,7 b' def resolvenodes(repo, revisions):' | |||
|
890 | 890 | |
|
891 | 891 | if not isinstance(revisions, list): |
|
892 | 892 | raise error.WireprotoCommandError( |
|
893 |
b'revisions must be defined as an |
|
|
893 | b'revisions must be defined as an array' | |
|
894 | 894 | ) |
|
895 | 895 | |
|
896 | 896 | for spec in revisions: |
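Every hunk in this series applies the same mechanical change: after black reflowed these call sites, string literals that had only been split to satisfy the old line-length limit fit on a single line again, so the leftover implicit concatenations (b'foo ' b'bar') are collapsed into one literal. For hunting down any remaining cases, a small helper built on the standard tokenize module can flag adjacent string literals; the function below is an illustrative sketch, not something this series adds:

    import io
    import tokenize

    def adjacent_string_literals(source):
        """Yield (row, col) positions where a string literal directly follows
        another one, i.e. candidates for joining into a single literal."""
        prev = None
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            if (tok.type == tokenize.STRING
                    and prev is not None
                    and prev.type == tokenize.STRING):
                yield tok.start
            # NL (a line break inside brackets) and comments do not end an
            # implicit concatenation, so look through them when tracking prev
            if tok.type not in (tokenize.NL, tokenize.COMMENT):
                prev = tok

    src = (
        "raise Abort(\n"
        "    _(b'cannot use --verify-optimized with ' b'--no-optimized')\n"
        ")\n"
    )
    print(list(adjacent_string_literals(src)))  # reports the second literal on line 2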