##// END OF EJS Templates
debug: add newlines at the end of three locations that appear to need it...
Kyle Lippincott -
r35482:7906354c default
parent child Browse files
Show More
@@ -1,802 +1,802 b''
1 1 # patchbomb.py - sending Mercurial changesets as patch emails
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to send changesets as (a series of) patch emails
9 9
10 10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 11 describes the series as a whole.
12 12
13 13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 14 first line of the changeset description as the subject text. The
15 15 message contains two or three body parts:
16 16
17 17 - The changeset description.
18 18 - [Optional] The result of running diffstat on the patch.
19 19 - The patch itself, as generated by :hg:`export`.
20 20
21 21 Each message refers to the first in the series using the In-Reply-To
22 22 and References headers, so they will show up as a sequence in threaded
23 23 mail and news readers, and in mail archives.
24 24
25 25 To configure other defaults, add a section like this to your
26 26 configuration file::
27 27
28 28 [email]
29 29 from = My Name <my@email>
30 30 to = recipient1, recipient2, ...
31 31 cc = cc1, cc2, ...
32 32 bcc = bcc1, bcc2, ...
33 33 reply-to = address1, address2, ...
34 34
35 35 Use ``[patchbomb]`` as configuration section name if you need to
36 36 override global ``[email]`` address settings.
37 37
38 38 Then you can use the :hg:`email` command to mail a series of
39 39 changesets as a patchbomb.
40 40
41 41 You can also either configure the method option in the email section
42 42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 43 that the patchbomb extension can automatically send patchbombs
44 44 directly from the commandline. See the [email] and [smtp] sections in
45 45 hgrc(5) for details.
46 46
47 47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 48 you do not supply one via configuration or the command line. You can
49 49 override this to never prompt by configuring an empty value::
50 50
51 51 [email]
52 52 cc =
53 53
54 54 You can control the default inclusion of an introduction message with the
55 55 ``patchbomb.intro`` configuration option. The configuration is always
56 56 overwritten by command line flags like --intro and --desc::
57 57
58 58 [patchbomb]
59 59 intro=auto # include introduction message if more than 1 patch (default)
60 60 intro=never # never include an introduction message
61 61 intro=always # always include an introduction message
62 62
63 63 You can specify a template for flags to be added in subject prefixes. Flags
64 64 specified by --flag option are exported as ``{flags}`` keyword::
65 65
66 66 [patchbomb]
67 67 flagtemplate = "{separate(' ',
68 68 ifeq(branch, 'default', '', branch|upper),
69 69 flags)}"
70 70
71 71 You can set patchbomb to always ask for confirmation by setting
72 72 ``patchbomb.confirm`` to true.
73 73 '''
74 74 from __future__ import absolute_import
75 75
76 76 import email as emailmod
77 77 import errno
78 78 import os
79 79 import socket
80 80 import tempfile
81 81
82 82 from mercurial.i18n import _
83 83 from mercurial import (
84 84 cmdutil,
85 85 commands,
86 86 error,
87 87 formatter,
88 88 hg,
89 89 mail,
90 90 node as nodemod,
91 91 patch,
92 92 pycompat,
93 93 registrar,
94 94 repair,
95 95 scmutil,
96 96 templater,
97 97 util,
98 98 )
stringio = util.stringio

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Register every [patchbomb] knob with its default.  All settings default to
# "unset" (None) except ``confirm`` (off) and ``intro`` (automatic).
for _name, _default in [
    ('bundletype', None),
    ('bcc', None),
    ('cc', None),
    ('confirm', False),
    ('flagtemplate', None),
    ('from', None),
    ('intro', 'auto'),
    ('publicurl', None),
    ('reply-to', None),
    ('to', None),
]:
    configitem('patchbomb', _name, default=_default)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
143 143
def _addpullheader(seq, ctx):
    """Add a header pointing to a public URL where the changeset is available
    """
    repo = ctx.repo()
    # experimental config: patchbomb.publicurl
    # waiting for some logic that check that the changeset are available on the
    # destination before patchbombing anything.
    publicurl = repo.ui.config('patchbomb', 'publicurl')
    if not publicurl:
        return None
    return ('Available At %s\n'
            '# hg pull %s -r %s' % (publicurl, publicurl, ctx))
156 156
def uisetup(ui):
    """Register the optional 'pullurl' header with the export machinery."""
    cmdutil.extraexport.append('pullurl')
    cmdutil.extraexportmap['pullurl'] = _addpullheader
160 160
def reposetup(ui, repo):
    """Allow writing the last-email.txt backup without the wlock held."""
    if not repo.local():
        return
    repo._wlockfreeprefix.add('last-email.txt')
165 165
def prompt(ui, prompt, default=None, rest=':'):
    """Ask the user a question, showing the default (if any) in brackets."""
    text = prompt
    if default:
        text += ' [%s]' % default
    return ui.prompt(text + rest, default)
170 170
def introwanted(ui, opts, number):
    '''is an introductory message apparently wanted?'''
    # Explicit command-line flags always win over configuration.
    if opts.get('intro') or opts.get('desc'):
        return True
    configured = ui.config('patchbomb', 'intro')
    if configured == 'always':
        return True
    if configured == 'never':
        return False
    if configured != 'auto':
        # Unknown value: warn, then fall back to the 'auto' behavior.
        ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
                     % configured)
        ui.write_err(_('(should be one of always, never, auto)\n'))
    # 'auto': an intro only makes sense for a multi-patch series.
    return 1 < number
188 188
def _formatflags(ui, repo, rev, flags):
    """build flag string optionally by template"""
    tmpl = ui.config('patchbomb', 'flagtemplate')
    if not tmpl:
        # No template configured: plain space-joined flags.
        return ' '.join(flags)
    buf = util.stringio()
    fmtopts = {'template': templater.unquotestring(tmpl)}
    with formatter.templateformatter(ui, buf, 'patchbombflag', fmtopts) as fm:
        fm.startitem()
        fm.context(ctx=repo[rev])
        fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
    return buf.getvalue()
201 201
def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
    """build prefix to patch subject"""
    flagstr = _formatflags(ui, repo, rev, flags)
    if flagstr:
        flagstr = ' ' + flagstr

    if numbered:
        # Zero-pad the index to the width of the series total.
        width = len(str(total))
        return '[PATCH %0*d of %d%s]' % (width, idx, total, flagstr)
    return '[PATCH%s]' % flagstr
213 213
def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
              patchname=None):
    """Build one email message for a single exported patch.

    Returns a ``(message, subject, diffstat)`` tuple.  ``patchlines`` is the
    output of :hg:`export` split into lines; it may be mutated when --plain
    strips the hg header.  Raises ValueError when neither a patch name nor a
    node id can be determined.
    """
    desc = []
    node = None
    body = ''

    # Scan the export header: remember the node id and collect the
    # description lines that precede the diff itself.
    for line in patchlines:
        if line.startswith('#'):
            if line.startswith('# Node ID'):
                node = line.split()[-1]
            continue
        if line.startswith('diff -r') or line.startswith('diff --git'):
            break
        desc.append(line)

    if not patchname and not node:
        raise ValueError

    if opts.get('attach') and not opts.get('body'):
        body = ('\n'.join(desc[1:]).strip() or
                'Patch subject is complete summary.')
        body += '\n\n\n'

    if opts.get('plain'):
        # Drop the '# ...' hg header, the blank separator, and any leading
        # empty lines.
        while patchlines and patchlines[0].startswith('# '):
            patchlines.pop(0)
        if patchlines:
            patchlines.pop(0)
        while patchlines and not patchlines[0].strip():
            patchlines.pop(0)

    ds = patch.diffstat(patchlines)
    if opts.get('diffstat'):
        body += ds + '\n\n'

    addattachment = opts.get('attach') or opts.get('inline')
    if not addattachment or opts.get('body'):
        body += '\n'.join(patchlines)

    if addattachment:
        msg = emailmod.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
                               opts.get('test'))
        binnode = nodemod.bin(node)
        # if node is mq patch, it will have the patch file's name as a tag
        if not patchname:
            patchtags = [t for t in repo.nodetags(binnode)
                         if t.endswith('.patch') or t.endswith('.diff')]
            if patchtags:
                patchname = patchtags[0]
            elif total > 1:
                patchname = cmdutil.makefilename(repo, '%b-%n.patch',
                                                 binnode, seqno=idx,
                                                 total=total)
            else:
                patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
        disposition = 'inline'
        if opts.get('attach'):
            disposition = 'attachment'
        p['Content-Disposition'] = disposition + '; filename=' + patchname
        msg.attach(p)
    else:
        msg = mail.mimetextpatch(body, display=opts.get('test'))

    prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
                           numbered)
    subj = desc[0].strip().rstrip('. ')
    if not numbered:
        subj = ' '.join([prefix, opts.get('subject') or subj])
    else:
        subj = ' '.join([prefix, subj])
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    msg['X-Mercurial-Node'] = node
    msg['X-Mercurial-Series-Index'] = '%i' % idx
    msg['X-Mercurial-Series-Total'] = '%i' % total
    return msg, subj, ds
293 293
def _getpatches(repo, revs, **opts):
    """return a list of patches for a list of revisions

    Each patch in the list is itself a list of lines.
    """
    ui = repo.ui
    prev = repo['.'].rev()
    for r in revs:
        # Warn when mailing the working directory's parent with local edits.
        if r == prev and (repo[None].files() or repo[None].deleted()):
            ui.warn(_('warning: working directory has '
                      'uncommitted changes\n'))
        buf = stringio()
        cmdutil.export(repo, [r], fp=buf,
                       opts=patch.difffeatureopts(ui, opts, git=True))
        yield buf.getvalue().split('\n')
def _getbundle(repo, dest, **opts):
    """return a bundle containing changesets missing in "dest"

    The `opts` keyword-arguments are the same as the one accepted by the
    `bundle` command.

    The bundle is a returned as a single in-memory binary blob.
    """
    ui = repo.ui
    tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
    tmpfn = os.path.join(tmpdir, 'bundle')
    btype = ui.config('patchbomb', 'bundletype')
    if btype:
        opts[r'type'] = btype
    try:
        commands.bundle(ui, repo, tmpfn, dest, **opts)
        return util.readfile(tmpfn)
    finally:
        # Best-effort cleanup of the temporary bundle file and directory.
        try:
            os.unlink(tmpfn)
        except OSError:
            pass
        os.rmdir(tmpdir)
332 332
def _getdescription(repo, defaultbody, sender, **opts):
    """obtain the body of the introduction message and return it

    This is also used for the body of email with an attached bundle.

    The body can be obtained either from the command line option or entered by
    the user through the editor.
    """
    ui = repo.ui
    descfile = opts.get(r'desc')
    if descfile:
        return open(descfile).read()
    ui.write(_('\nWrite the introductory message for the '
               'patch series.\n\n'))
    body = ui.edit(defaultbody, sender, repopath=repo.path,
                   action='patchbombbody')
    # Save series description in case sendmail fails
    msgfile = repo.vfs('last-email.txt', 'wb')
    msgfile.write(body)
    msgfile.close()
    return body
354 354
def _getbundlemsgs(repo, sender, bundle, **opts):
    """Get the full email for sending a given bundle

    This function returns a list of "email" tuples (subject, content, None).
    The list is always one message long in that case.
    """
    ui = repo.ui
    _charsets = mail._charsets(ui)
    subj = (opts.get(r'subject')
            or prompt(ui, 'Subject:', 'A bundle for your repository'))

    body = _getdescription(repo, '', sender, **opts)
    msg = emailmod.MIMEMultipart.MIMEMultipart()
    if body:
        msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
    # Attach the binary bundle, base64-encoded, under a .hg filename.
    datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
    datapart.set_payload(bundle)
    bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
    datapart.add_header('Content-Disposition', 'attachment',
                        filename=bundlename)
    emailmod.Encoders.encode_base64(datapart)
    msg.attach(datapart)
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
    return [(msg, subj, None)]
379 379
def _makeintro(repo, sender, revs, patches, **opts):
    """make an introduction email, asking the user for content if needed

    email is returned as (subject, body, cumulative-diffstat)"""
    ui = repo.ui
    _charsets = mail._charsets(ui)

    # use the last revision which is likely to be a bookmarked head
    prefix = _formatprefix(ui, repo, revs.last(), opts.get(r'flag'),
                           0, len(patches), numbered=True)
    subj = (opts.get(r'subject') or
            prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
    if not subj:
        return None  # skip intro if the user doesn't bother

    subj = prefix + ' ' + subj

    body = ''
    if opts.get(r'diffstat'):
        # generate a cumulative diffstat of the whole patch series
        diffstat = patch.diffstat(sum(patches, []))
        body = '\n' + diffstat
    else:
        diffstat = None

    body = _getdescription(repo, body, sender, **opts)
    msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
    msg['Subject'] = mail.headencode(ui, subj, _charsets,
                                     opts.get(r'test'))
    return (msg, subj, diffstat)
410 410
def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
    """return a list of emails from a list of patches

    This involves introduction message creation if necessary.

    This function returns a list of "email" tuples (subject, content, None).
    """
    bytesopts = pycompat.byteskwargs(opts)
    ui = repo.ui
    _charsets = mail._charsets(ui)
    patches = list(_getpatches(repo, revs, **opts))
    msgs = []

    ui.write(_('this patch series consists of %d patches.\n\n')
             % len(patches))

    # build the intro message, or skip it if the user declines
    if introwanted(ui, bytesopts, len(patches)):
        intro = _makeintro(repo, sender, revs, patches, **opts)
        if intro:
            msgs.append(intro)

    # are we going to send more than one message?
    numbered = len(msgs) + len(patches) > 1

    # now generate the actual patch messages
    name = None
    assert len(revs) == len(patches)
    for seq, (rev, lines) in enumerate(zip(revs, patches)):
        if patchnames:
            name = patchnames[seq]
        msgs.append(makepatch(ui, repo, rev, lines, bytesopts, _charsets,
                              seq + 1, len(patches), numbered, name))

    return msgs
447 447
def _getoutgoing(repo, dest, revs):
    '''Return the revisions present locally but not in dest'''
    ui = repo.ui
    url = ui.expandpath(dest or 'default-push', dest or 'default')
    url = hg.parseurl(url)[0]
    ui.status(_('comparing with %s\n') % util.hidepassword(url))

    # Drop negative (nonexistent) revisions; default to tip when empty.
    revs = [r for r in revs if r >= 0]
    if not revs:
        revs = [len(repo) - 1]
    revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
    if not revs:
        ui.status(_("no changes found\n"))
    return revs
462 462
# Shared email-related command-line options; the order here fixes the order
# in which they appear in :hg:`help email`.
emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
    ('t', 'to', [], _('email addresses of recipients')),
]
481 481
@command('email',
    [('g', 'git', None, _('use git extended diff format')),
    ('', 'plain', None, _('omit hg patch header')),
    ('o', 'outgoing', None,
     _('send changes not found in the target repository')),
    ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
    ('B', 'bookmark', '', _('send changes only reachable by given bookmark')),
    ('', 'bundlename', 'bundle',
     _('name of the bundle attachment file'), _('NAME')),
    ('r', 'rev', [], _('a revision to send'), _('REV')),
    ('', 'force', None, _('run even when remote repository is unrelated '
       '(with -b/--bundle)')),
    ('', 'base', [], _('a base changeset to specify instead of a destination '
       '(with -b/--bundle)'), _('REV')),
    ('', 'intro', None, _('send an introduction email for a single patch')),
    ] + emailopts + cmdutil.remoteopts,
    _('hg email [OPTION]... [DEST]...'))
def email(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent. Use the ``patchbomb.bundletype`` config option to
    control the bundle type as with :hg:`bundle --type`.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -B feature       # send all ancestors of feature bookmark

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''
    opts = pycompat.byteskwargs(opts)

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    bookmark = opts.get('bookmark')

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or bookmark):
        raise error.Abort(_('specify at least one changeset with -B, -r or -o'))

    if outgoing and bundle:
        raise error.Abort(_("--outgoing mode always on with --bundle;"
                            " do not re-specify --outgoing"))
    if rev and bookmark:
        raise error.Abort(_("-r and -B are mutually exclusive"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise error.Abort(_("too many destinations"))
        if revs:
            dest = revs[0]
        else:
            dest = None
        revs = []

    if rev:
        if revs:
            raise error.Abort(_('use only one form to specify the revision'))
        revs = rev
    elif bookmark:
        if bookmark not in repo._bookmarks:
            raise error.Abort(_("bookmark '%s' not found") % bookmark)
        revs = repair.stripbmrevset(repo, bookmark)

    revs = scmutil.revrange(repo, revs)
    if outgoing:
        revs = _getoutgoing(repo, dest, revs)
    if bundle:
        opts['revs'] = [str(r) for r in revs]

    # check if revision exist on the public destination
    publicurl = repo.ui.config('patchbomb', 'publicurl')
    if publicurl:
        # Debug messages must be newline-terminated so they do not run
        # into subsequent output (fix from r35482).
        repo.ui.debug('checking that revision exist in the public repo\n')
        try:
            publicpeer = hg.peer(repo, {}, publicurl)
        except error.RepoError:
            repo.ui.write_err(_('unable to access public repo: %s\n')
                              % publicurl)
            raise
        if not publicpeer.capable('known'):
            repo.ui.debug('skipping existence checks: public repo too old\n')
        else:
            out = [repo[r] for r in revs]
            known = publicpeer.known(h.node() for h in out)
            missing = []
            for idx, h in enumerate(out):
                if not known[idx]:
                    missing.append(h)
            if missing:
                if 1 < len(missing):
                    msg = _('public "%s" is missing %s and %i others')
                    msg %= (publicurl, missing[0], len(missing) - 1)
                else:
                    msg = _('public url %s is missing %s')
                    msg %= (publicurl, missing[0])
                revhint = ' '.join('-r %s' % h
                                   for h in repo.set('heads(%ld)', missing))
                hint = _("use 'hg push %s %s'") % (publicurl, revhint)
                raise error.Abort(msg, hint=hint)

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        # Message ids embed the (truncated) node, the send time and the host.
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    # deprecated config: patchbomb.from
    sender = (opts.get('from') or ui.config('email', 'from') or
              ui.config('patchbomb', 'from') or
              prompt(ui, 'From', ui.username()))

    if bundle:
        stropts = pycompat.strkwargs(opts)
        bundledata = _getbundle(repo, dest, **stropts)
        bundleopts = stropts.copy()
        bundleopts.pop(r'bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        # Resolve an address header from options, then config, then a prompt.
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey) or
                ui.config('patchbomb', configkey))
        if not addr:
            specified = (ui.hasconfig('email', configkey) or
                         ui.hasconfig('patchbomb', configkey))
            if not specified and ask:
                addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        elif default:
            return mail.addrlistencode(
                ui, [default], _charsets, opts.get('test'))
        return []

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise error.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='')
    bcc = getaddrs('Bcc')
    replyto = getaddrs('Reply-To')

    confirm = ui.configbool('patchbomb', 'confirm')
    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))

    if confirm:
        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
        ui.write(('From: %s\n' % sender), label='patchbomb.from')
        for addr in showaddrs:
            ui.write('%s\n' % addr, label='patchbomb.to')
        for m, subj, ds in msgs:
            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
            if ds:
                ui.write(ds, label='patchbomb.diffstats')
        ui.write('\n')
        if ui.promptchoice(_('are you sure you want to send (yn)?'
                             '$$ &Yes $$ &No')):
            raise error.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    sender_addr = emailmod.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    firstpatch = None
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            # the intro message has no X-Mercurial-Node header
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)

        # bump the timestamp so messages keep their order in the mailbox
        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('displaying '), subj, ' ...\n')
            ui.pager('email')
            generator = emailmod.Generator.Generator(ui, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                ui.write('\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
        else:
            if not sendmail:
                sendmail = mail.connect(ui, mbox=mbox)
            ui.status(_('sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs),
                        unit=_('emails'))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = stringio()
            generator = emailmod.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())

    ui.progress(_('writing'), None)
    ui.progress(_('sending'), None)
@@ -1,2116 +1,2116 b''
1 1 # bundle2.py - generic container format to transmit arbitrary data.
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """Handling of the new bundle2 format
8 8
9 9 The goal of bundle2 is to act as an atomically packet to transmit a set of
10 10 payloads in an application agnostic way. It consist in a sequence of "parts"
11 11 that will be handed to and processed by the application layer.
12 12
13 13
14 14 General format architecture
15 15 ===========================
16 16
17 17 The format is architectured as follow
18 18
19 19 - magic string
20 20 - stream level parameters
21 21 - payload parts (any number)
22 22 - end of stream marker.
23 23
24 24 the Binary format
25 25 ============================
26 26
27 27 All numbers are unsigned and big-endian.
28 28
29 29 stream level parameters
30 30 ------------------------
31 31
32 32 Binary format is as follow
33 33
34 34 :params size: int32
35 35
36 36 The total number of Bytes used by the parameters
37 37
38 38 :params value: arbitrary number of Bytes
39 39
40 40 A blob of `params size` containing the serialized version of all stream level
41 41 parameters.
42 42
43 43 The blob contains a space separated list of parameters. Parameters with value
44 44 are stored in the form `<name>=<value>`. Both name and value are urlquoted.
45 45
46 46 Empty name are obviously forbidden.
47 47
48 48 Name MUST start with a letter. If this first letter is lower case, the
49 49 parameter is advisory and can be safely ignored. However when the first
50 50 letter is capital, the parameter is mandatory and the bundling process MUST
51 51 stop if he is not able to proceed it.
52 52
53 53 Stream parameters use a simple textual format for two main reasons:
54 54
55 55 - Stream level parameters should remain simple and we want to discourage any
56 56 crazy usage.
57 57 - Textual data allow easy human inspection of a bundle2 header in case of
58 58 troubles.
59 59
60 60 Any Applicative level options MUST go into a bundle2 part instead.
61 61
62 62 Payload part
63 63 ------------------------
64 64
65 65 Binary format is as follow
66 66
67 67 :header size: int32
68 68
69 69 The total number of Bytes used by the part header. When the header is empty
70 70 (size = 0) this is interpreted as the end of stream marker.
71 71
72 72 :header:
73 73
74 74 The header defines how to interpret the part. It contains two piece of
75 75 data: the part type, and the part parameters.
76 76
77 77 The part type is used to route an application level handler, that can
78 78 interpret payload.
79 79
80 80 Part parameters are passed to the application level handler. They are
81 81 meant to convey information that will help the application level object to
82 82 interpret the part payload.
83 83
84 84 The binary format of the header is has follow
85 85
86 86 :typesize: (one byte)
87 87
88 88 :parttype: alphanumerical part name (restricted to [a-zA-Z0-9_:-]*)
89 89
90 90 :partid: A 32bits integer (unique in the bundle) that can be used to refer
91 91 to this part.
92 92
93 93 :parameters:
94 94
95 95 Part's parameter may have arbitrary content, the binary structure is::
96 96
97 97 <mandatory-count><advisory-count><param-sizes><param-data>
98 98
99 99 :mandatory-count: 1 byte, number of mandatory parameters
100 100
101 101 :advisory-count: 1 byte, number of advisory parameters
102 102
103 103 :param-sizes:
104 104
105 105 N couple of bytes, where N is the total number of parameters. Each
106 106 couple contains (<size-of-key>, <size-of-value) for one parameter.
107 107
108 108 :param-data:
109 109
110 110 A blob of bytes from which each parameter key and value can be
111 111 retrieved using the list of size couples stored in the previous
112 112 field.
113 113
114 114 Mandatory parameters comes first, then the advisory ones.
115 115
116 116 Each parameter's key MUST be unique within the part.
117 117
118 118 :payload:
119 119
120 120 payload is a series of `<chunksize><chunkdata>`.
121 121
122 122 `chunksize` is an int32, `chunkdata` are plain bytes (as much as
123 123 `chunksize` says)` The payload part is concluded by a zero size chunk.
124 124
125 125 The current implementation always produces either zero or one chunk.
126 126 This is an implementation limitation that will ultimately be lifted.
127 127
128 128 `chunksize` can be negative to trigger special case processing. No such
129 129 processing is in place yet.
130 130
131 131 Bundle processing
132 132 ============================
133 133
134 134 Each part is processed in order using a "part handler". Handler are registered
135 135 for a certain part type.
136 136
137 137 The matching of a part to its handler is case insensitive. The case of the
138 138 part type is used to know if a part is mandatory or advisory. If the Part type
139 139 contains any uppercase char it is considered mandatory. When no handler is
140 140 known for a Mandatory part, the process is aborted and an exception is raised.
141 141 If the part is advisory and no handler is known, the part is ignored. When the
142 142 process is aborted, the full bundle is still read from the stream to keep the
143 143 channel usable. But none of the part read from an abort are processed. In the
144 144 future, dropping the stream may become an option for channel we do not care to
145 145 preserve.
146 146 """
147 147
148 148 from __future__ import absolute_import, division
149 149
150 150 import errno
151 151 import os
152 152 import re
153 153 import string
154 154 import struct
155 155 import sys
156 156
157 157 from .i18n import _
158 158 from . import (
159 159 bookmarks,
160 160 changegroup,
161 161 error,
162 162 node as nodemod,
163 163 obsolete,
164 164 phases,
165 165 pushkey,
166 166 pycompat,
167 167 tags,
168 168 url,
169 169 util,
170 170 )
171 171
172 172 urlerr = util.urlerr
173 173 urlreq = util.urlreq
174 174
175 175 _pack = struct.pack
176 176 _unpack = struct.unpack
177 177
178 178 _fstreamparamsize = '>i'
179 179 _fpartheadersize = '>i'
180 180 _fparttypesize = '>B'
181 181 _fpartid = '>I'
182 182 _fpayloadsize = '>i'
183 183 _fpartparamcount = '>BB'
184 184
185 185 preferedchunksize = 4096
186 186
187 187 _parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
188 188
189 189 def outdebug(ui, message):
190 190 """debug regarding output stream (bundling)"""
191 191 if ui.configbool('devel', 'bundle2.debug'):
192 192 ui.debug('bundle2-output: %s\n' % message)
193 193
194 194 def indebug(ui, message):
195 195 """debug on input stream (unbundling)"""
196 196 if ui.configbool('devel', 'bundle2.debug'):
197 197 ui.debug('bundle2-input: %s\n' % message)
198 198
199 199 def validateparttype(parttype):
200 200 """raise ValueError if a parttype contains invalid character"""
201 201 if _parttypeforbidden.search(parttype):
202 202 raise ValueError(parttype)
203 203
204 204 def _makefpartparamsizes(nbparams):
205 205 """return a struct format to read part parameter sizes
206 206
207 207 The number parameters is variable so we need to build that format
208 208 dynamically.
209 209 """
210 210 return '>'+('BB'*nbparams)
211 211
212 212 parthandlermapping = {}
213 213
214 214 def parthandler(parttype, params=()):
215 215 """decorator that register a function as a bundle2 part handler
216 216
217 217 eg::
218 218
219 219 @parthandler('myparttype', ('mandatory', 'param', 'handled'))
220 220 def myparttypehandler(...):
221 221 '''process a part of type "my part".'''
222 222 ...
223 223 """
224 224 validateparttype(parttype)
225 225 def _decorator(func):
226 226 lparttype = parttype.lower() # enforce lower case matching.
227 227 assert lparttype not in parthandlermapping
228 228 parthandlermapping[lparttype] = func
229 229 func.params = frozenset(params)
230 230 return func
231 231 return _decorator
232 232
233 233 class unbundlerecords(object):
234 234 """keep record of what happens during and unbundle
235 235
236 236 New records are added using `records.add('cat', obj)`. Where 'cat' is a
237 237 category of record and obj is an arbitrary object.
238 238
239 239 `records['cat']` will return all entries of this category 'cat'.
240 240
241 241 Iterating on the object itself will yield `('category', obj)` tuples
242 242 for all entries.
243 243
244 244 All iterations happens in chronological order.
245 245 """
246 246
247 247 def __init__(self):
248 248 self._categories = {}
249 249 self._sequences = []
250 250 self._replies = {}
251 251
252 252 def add(self, category, entry, inreplyto=None):
253 253 """add a new record of a given category.
254 254
255 255 The entry can then be retrieved in the list returned by
256 256 self['category']."""
257 257 self._categories.setdefault(category, []).append(entry)
258 258 self._sequences.append((category, entry))
259 259 if inreplyto is not None:
260 260 self.getreplies(inreplyto).add(category, entry)
261 261
262 262 def getreplies(self, partid):
263 263 """get the records that are replies to a specific part"""
264 264 return self._replies.setdefault(partid, unbundlerecords())
265 265
266 266 def __getitem__(self, cat):
267 267 return tuple(self._categories.get(cat, ()))
268 268
269 269 def __iter__(self):
270 270 return iter(self._sequences)
271 271
272 272 def __len__(self):
273 273 return len(self._sequences)
274 274
275 275 def __nonzero__(self):
276 276 return bool(self._sequences)
277 277
278 278 __bool__ = __nonzero__
279 279
280 280 class bundleoperation(object):
281 281 """an object that represents a single bundling process
282 282
283 283 Its purpose is to carry unbundle-related objects and states.
284 284
285 285 A new object should be created at the beginning of each bundle processing.
286 286 The object is to be returned by the processing function.
287 287
288 288 The object has very little content now it will ultimately contain:
289 289 * an access to the repo the bundle is applied to,
290 290 * a ui object,
291 291 * a way to retrieve a transaction to add changes to the repo,
292 292 * a way to record the result of processing each part,
293 293 * a way to construct a bundle response when applicable.
294 294 """
295 295
296 296 def __init__(self, repo, transactiongetter, captureoutput=True):
297 297 self.repo = repo
298 298 self.ui = repo.ui
299 299 self.records = unbundlerecords()
300 300 self.reply = None
301 301 self.captureoutput = captureoutput
302 302 self.hookargs = {}
303 303 self._gettransaction = transactiongetter
304 304 # carries value that can modify part behavior
305 305 self.modes = {}
306 306
307 307 def gettransaction(self):
308 308 transaction = self._gettransaction()
309 309
310 310 if self.hookargs:
311 311 # the ones added to the transaction supercede those added
312 312 # to the operation.
313 313 self.hookargs.update(transaction.hookargs)
314 314 transaction.hookargs = self.hookargs
315 315
316 316 # mark the hookargs as flushed. further attempts to add to
317 317 # hookargs will result in an abort.
318 318 self.hookargs = None
319 319
320 320 return transaction
321 321
322 322 def addhookargs(self, hookargs):
323 323 if self.hookargs is None:
324 324 raise error.ProgrammingError('attempted to add hookargs to '
325 325 'operation after transaction started')
326 326 self.hookargs.update(hookargs)
327 327
328 328 class TransactionUnavailable(RuntimeError):
329 329 pass
330 330
331 331 def _notransaction():
332 332 """default method to get a transaction while processing a bundle
333 333
334 334 Raise an exception to highlight the fact that no transaction was expected
335 335 to be created"""
336 336 raise TransactionUnavailable()
337 337
338 338 def applybundle(repo, unbundler, tr, source=None, url=None, **kwargs):
339 339 # transform me into unbundler.apply() as soon as the freeze is lifted
340 340 if isinstance(unbundler, unbundle20):
341 341 tr.hookargs['bundle2'] = '1'
342 342 if source is not None and 'source' not in tr.hookargs:
343 343 tr.hookargs['source'] = source
344 344 if url is not None and 'url' not in tr.hookargs:
345 345 tr.hookargs['url'] = url
346 346 return processbundle(repo, unbundler, lambda: tr)
347 347 else:
348 348 # the transactiongetter won't be used, but we might as well set it
349 349 op = bundleoperation(repo, lambda: tr)
350 350 _processchangegroup(op, unbundler, tr, source, url, **kwargs)
351 351 return op
352 352
353 353 class partiterator(object):
354 354 def __init__(self, repo, op, unbundler):
355 355 self.repo = repo
356 356 self.op = op
357 357 self.unbundler = unbundler
358 358 self.iterator = None
359 359 self.count = 0
360 360 self.current = None
361 361
362 362 def __enter__(self):
363 363 def func():
364 364 itr = enumerate(self.unbundler.iterparts())
365 365 for count, p in itr:
366 366 self.count = count
367 367 self.current = p
368 368 yield p
369 369 p.consume()
370 370 self.current = None
371 371 self.iterator = func()
372 372 return self.iterator
373 373
374 374 def __exit__(self, type, exc, tb):
375 375 if not self.iterator:
376 376 return
377 377
378 378 # Only gracefully abort in a normal exception situation. User aborts
379 379 # like Ctrl+C throw a KeyboardInterrupt which is not a base Exception,
380 380 # and should not gracefully cleanup.
381 381 if isinstance(exc, Exception):
382 382 # Any exceptions seeking to the end of the bundle at this point are
383 383 # almost certainly related to the underlying stream being bad.
384 384 # And, chances are that the exception we're handling is related to
385 385 # getting in that bad state. So, we swallow the seeking error and
386 386 # re-raise the original error.
387 387 seekerror = False
388 388 try:
389 389 if self.current:
390 390 # consume the part content to not corrupt the stream.
391 391 self.current.consume()
392 392
393 393 for part in self.iterator:
394 394 # consume the bundle content
395 395 part.consume()
396 396 except Exception:
397 397 seekerror = True
398 398
399 399 # Small hack to let caller code distinguish exceptions from bundle2
400 400 # processing from processing the old format. This is mostly needed
401 401 # to handle different return codes to unbundle according to the type
402 402 # of bundle. We should probably clean up or drop this return code
403 403 # craziness in a future version.
404 404 exc.duringunbundle2 = True
405 405 salvaged = []
406 406 replycaps = None
407 407 if self.op.reply is not None:
408 408 salvaged = self.op.reply.salvageoutput()
409 409 replycaps = self.op.reply.capabilities
410 410 exc._replycaps = replycaps
411 411 exc._bundle2salvagedoutput = salvaged
412 412
413 413 # Re-raising from a variable loses the original stack. So only use
414 414 # that form if we need to.
415 415 if seekerror:
416 416 raise exc
417 417
418 418 self.repo.ui.debug('bundle2-input-bundle: %i parts total\n' %
419 419 self.count)
420 420
421 421 def processbundle(repo, unbundler, transactiongetter=None, op=None):
422 422 """This function process a bundle, apply effect to/from a repo
423 423
424 424 It iterates over each part then searches for and uses the proper handling
425 425 code to process the part. Parts are processed in order.
426 426
427 427 Unknown Mandatory part will abort the process.
428 428
429 429 It is temporarily possible to provide a prebuilt bundleoperation to the
430 430 function. This is used to ensure output is properly propagated in case of
431 431 an error during the unbundling. This output capturing part will likely be
432 432 reworked and this ability will probably go away in the process.
433 433 """
434 434 if op is None:
435 435 if transactiongetter is None:
436 436 transactiongetter = _notransaction
437 437 op = bundleoperation(repo, transactiongetter)
438 438 # todo:
439 439 # - replace this is a init function soon.
440 440 # - exception catching
441 441 unbundler.params
442 442 if repo.ui.debugflag:
443 443 msg = ['bundle2-input-bundle:']
444 444 if unbundler.params:
445 445 msg.append(' %i params' % len(unbundler.params))
446 446 if op._gettransaction is None or op._gettransaction is _notransaction:
447 447 msg.append(' no-transaction')
448 448 else:
449 449 msg.append(' with-transaction')
450 450 msg.append('\n')
451 451 repo.ui.debug(''.join(msg))
452 452
453 453 processparts(repo, op, unbundler)
454 454
455 455 return op
456 456
457 457 def processparts(repo, op, unbundler):
458 458 with partiterator(repo, op, unbundler) as parts:
459 459 for part in parts:
460 460 _processpart(op, part)
461 461
462 462 def _processchangegroup(op, cg, tr, source, url, **kwargs):
463 463 ret = cg.apply(op.repo, tr, source, url, **kwargs)
464 464 op.records.add('changegroup', {
465 465 'return': ret,
466 466 })
467 467 return ret
468 468
469 469 def _gethandler(op, part):
470 470 status = 'unknown' # used by debug output
471 471 try:
472 472 handler = parthandlermapping.get(part.type)
473 473 if handler is None:
474 474 status = 'unsupported-type'
475 475 raise error.BundleUnknownFeatureError(parttype=part.type)
476 476 indebug(op.ui, 'found a handler for part %s' % part.type)
477 477 unknownparams = part.mandatorykeys - handler.params
478 478 if unknownparams:
479 479 unknownparams = list(unknownparams)
480 480 unknownparams.sort()
481 481 status = 'unsupported-params (%s)' % ', '.join(unknownparams)
482 482 raise error.BundleUnknownFeatureError(parttype=part.type,
483 483 params=unknownparams)
484 484 status = 'supported'
485 485 except error.BundleUnknownFeatureError as exc:
486 486 if part.mandatory: # mandatory parts
487 487 raise
488 488 indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
489 489 return # skip to part processing
490 490 finally:
491 491 if op.ui.debugflag:
492 492 msg = ['bundle2-input-part: "%s"' % part.type]
493 493 if not part.mandatory:
494 494 msg.append(' (advisory)')
495 495 nbmp = len(part.mandatorykeys)
496 496 nbap = len(part.params) - nbmp
497 497 if nbmp or nbap:
498 498 msg.append(' (params:')
499 499 if nbmp:
500 500 msg.append(' %i mandatory' % nbmp)
501 501 if nbap:
502 502 msg.append(' %i advisory' % nbmp)
503 503 msg.append(')')
504 504 msg.append(' %s\n' % status)
505 505 op.ui.debug(''.join(msg))
506 506
507 507 return handler
508 508
509 509 def _processpart(op, part):
510 510 """process a single part from a bundle
511 511
512 512 The part is guaranteed to have been fully consumed when the function exits
513 513 (even if an exception is raised)."""
514 514 handler = _gethandler(op, part)
515 515 if handler is None:
516 516 return
517 517
518 518 # handler is called outside the above try block so that we don't
519 519 # risk catching KeyErrors from anything other than the
520 520 # parthandlermapping lookup (any KeyError raised by handler()
521 521 # itself represents a defect of a different variety).
522 522 output = None
523 523 if op.captureoutput and op.reply is not None:
524 524 op.ui.pushbuffer(error=True, subproc=True)
525 525 output = ''
526 526 try:
527 527 handler(op, part)
528 528 finally:
529 529 if output is not None:
530 530 output = op.ui.popbuffer()
531 531 if output:
532 532 outpart = op.reply.newpart('output', data=output,
533 533 mandatory=False)
534 534 outpart.addparam(
535 535 'in-reply-to', pycompat.bytestr(part.id), mandatory=False)
536 536
537 537 def decodecaps(blob):
538 538 """decode a bundle2 caps bytes blob into a dictionary
539 539
540 540 The blob is a list of capabilities (one per line)
541 541 Capabilities may have values using a line of the form::
542 542
543 543 capability=value1,value2,value3
544 544
545 545 The values are always a list."""
546 546 caps = {}
547 547 for line in blob.splitlines():
548 548 if not line:
549 549 continue
550 550 if '=' not in line:
551 551 key, vals = line, ()
552 552 else:
553 553 key, vals = line.split('=', 1)
554 554 vals = vals.split(',')
555 555 key = urlreq.unquote(key)
556 556 vals = [urlreq.unquote(v) for v in vals]
557 557 caps[key] = vals
558 558 return caps
559 559
560 560 def encodecaps(caps):
561 561 """encode a bundle2 caps dictionary into a bytes blob"""
562 562 chunks = []
563 563 for ca in sorted(caps):
564 564 vals = caps[ca]
565 565 ca = urlreq.quote(ca)
566 566 vals = [urlreq.quote(v) for v in vals]
567 567 if vals:
568 568 ca = "%s=%s" % (ca, ','.join(vals))
569 569 chunks.append(ca)
570 570 return '\n'.join(chunks)
571 571
572 572 bundletypes = {
573 573 "": ("", 'UN'), # only when using unbundle on ssh and old http servers
574 574 # since the unification ssh accepts a header but there
575 575 # is no capability signaling it.
576 576 "HG20": (), # special-cased below
577 577 "HG10UN": ("HG10UN", 'UN'),
578 578 "HG10BZ": ("HG10", 'BZ'),
579 579 "HG10GZ": ("HG10GZ", 'GZ'),
580 580 }
581 581
582 582 # hgweb uses this list to communicate its preferred type
583 583 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
584 584
585 585 class bundle20(object):
586 586 """represent an outgoing bundle2 container
587 587
588 588 Use the `addparam` method to add stream level parameter. and `newpart` to
589 589 populate it. Then call `getchunks` to retrieve all the binary chunks of
590 590 data that compose the bundle2 container."""
591 591
592 592 _magicstring = 'HG20'
593 593
594 594 def __init__(self, ui, capabilities=()):
595 595 self.ui = ui
596 596 self._params = []
597 597 self._parts = []
598 598 self.capabilities = dict(capabilities)
599 599 self._compengine = util.compengines.forbundletype('UN')
600 600 self._compopts = None
601 601
602 602 def setcompression(self, alg, compopts=None):
603 603 """setup core part compression to <alg>"""
604 604 if alg in (None, 'UN'):
605 605 return
606 606 assert not any(n.lower() == 'compression' for n, v in self._params)
607 607 self.addparam('Compression', alg)
608 608 self._compengine = util.compengines.forbundletype(alg)
609 609 self._compopts = compopts
610 610
611 611 @property
612 612 def nbparts(self):
613 613 """total number of parts added to the bundler"""
614 614 return len(self._parts)
615 615
616 616 # methods used to defines the bundle2 content
617 617 def addparam(self, name, value=None):
618 618 """add a stream level parameter"""
619 619 if not name:
620 620 raise ValueError(r'empty parameter name')
621 621 if name[0:1] not in pycompat.bytestr(string.ascii_letters):
622 622 raise ValueError(r'non letter first character: %s' % name)
623 623 self._params.append((name, value))
624 624
625 625 def addpart(self, part):
626 626 """add a new part to the bundle2 container
627 627
628 628 Parts contains the actual applicative payload."""
629 629 assert part.id is None
630 630 part.id = len(self._parts) # very cheap counter
631 631 self._parts.append(part)
632 632
633 633 def newpart(self, typeid, *args, **kwargs):
634 634 """create a new part and add it to the containers
635 635
636 636 As the part is directly added to the containers. For now, this means
637 637 that any failure to properly initialize the part after calling
638 638 ``newpart`` should result in a failure of the whole bundling process.
639 639
640 640 You can still fall back to manually create and add if you need better
641 641 control."""
642 642 part = bundlepart(typeid, *args, **kwargs)
643 643 self.addpart(part)
644 644 return part
645 645
646 646 # methods used to generate the bundle2 stream
647 647 def getchunks(self):
648 648 if self.ui.debugflag:
649 649 msg = ['bundle2-output-bundle: "%s",' % self._magicstring]
650 650 if self._params:
651 651 msg.append(' (%i params)' % len(self._params))
652 652 msg.append(' %i parts total\n' % len(self._parts))
653 653 self.ui.debug(''.join(msg))
654 654 outdebug(self.ui, 'start emission of %s stream' % self._magicstring)
655 655 yield self._magicstring
656 656 param = self._paramchunk()
657 657 outdebug(self.ui, 'bundle parameter: %s' % param)
658 658 yield _pack(_fstreamparamsize, len(param))
659 659 if param:
660 660 yield param
661 661 for chunk in self._compengine.compressstream(self._getcorechunk(),
662 662 self._compopts):
663 663 yield chunk
664 664
665 665 def _paramchunk(self):
666 666 """return a encoded version of all stream parameters"""
667 667 blocks = []
668 668 for par, value in self._params:
669 669 par = urlreq.quote(par)
670 670 if value is not None:
671 671 value = urlreq.quote(value)
672 672 par = '%s=%s' % (par, value)
673 673 blocks.append(par)
674 674 return ' '.join(blocks)
675 675
676 676 def _getcorechunk(self):
677 677 """yield chunk for the core part of the bundle
678 678
679 679 (all but headers and parameters)"""
680 680 outdebug(self.ui, 'start of parts')
681 681 for part in self._parts:
682 682 outdebug(self.ui, 'bundle part: "%s"' % part.type)
683 683 for chunk in part.getchunks(ui=self.ui):
684 684 yield chunk
685 685 outdebug(self.ui, 'end of bundle')
686 686 yield _pack(_fpartheadersize, 0)
687 687
688 688
689 689 def salvageoutput(self):
690 690 """return a list with a copy of all output parts in the bundle
691 691
692 692 This is meant to be used during error handling to make sure we preserve
693 693 server output"""
694 694 salvaged = []
695 695 for part in self._parts:
696 696 if part.type.startswith('output'):
697 697 salvaged.append(part.copy())
698 698 return salvaged
699 699
700 700
701 701 class unpackermixin(object):
702 702 """A mixin to extract bytes and struct data from a stream"""
703 703
704 704 def __init__(self, fp):
705 705 self._fp = fp
706 706
707 707 def _unpack(self, format):
708 708 """unpack this struct format from the stream
709 709
710 710 This method is meant for internal usage by the bundle2 protocol only.
711 711 They directly manipulate the low level stream including bundle2 level
712 712 instruction.
713 713
714 714 Do not use it to implement higher-level logic or methods."""
715 715 data = self._readexact(struct.calcsize(format))
716 716 return _unpack(format, data)
717 717
718 718 def _readexact(self, size):
719 719 """read exactly <size> bytes from the stream
720 720
721 721 This method is meant for internal usage by the bundle2 protocol only.
722 722 They directly manipulate the low level stream including bundle2 level
723 723 instruction.
724 724
725 725 Do not use it to implement higher-level logic or methods."""
726 726 return changegroup.readexactly(self._fp, size)
727 727
728 728 def getunbundler(ui, fp, magicstring=None):
729 729 """return a valid unbundler object for a given magicstring"""
730 730 if magicstring is None:
731 731 magicstring = changegroup.readexactly(fp, 4)
732 732 magic, version = magicstring[0:2], magicstring[2:4]
733 733 if magic != 'HG':
734 734 ui.debug(
735 735 "error: invalid magic: %r (version %r), should be 'HG'\n"
736 736 % (magic, version))
737 737 raise error.Abort(_('not a Mercurial bundle'))
738 738 unbundlerclass = formatmap.get(version)
739 739 if unbundlerclass is None:
740 740 raise error.Abort(_('unknown bundle version %s') % version)
741 741 unbundler = unbundlerclass(ui, fp)
742 742 indebug(ui, 'start processing of %s stream' % magicstring)
743 743 return unbundler
744 744
745 745 class unbundle20(unpackermixin):
746 746 """interpret a bundle2 stream
747 747
748 748 This class is fed with a binary stream and yields parts through its
749 749 `iterparts` methods."""
750 750
751 751 _magicstring = 'HG20'
752 752
753 753 def __init__(self, ui, fp):
754 754 """If header is specified, we do not read it out of the stream."""
755 755 self.ui = ui
756 756 self._compengine = util.compengines.forbundletype('UN')
757 757 self._compressed = None
758 758 super(unbundle20, self).__init__(fp)
759 759
760 760 @util.propertycache
761 761 def params(self):
762 762 """dictionary of stream level parameters"""
763 763 indebug(self.ui, 'reading bundle2 stream parameters')
764 764 params = {}
765 765 paramssize = self._unpack(_fstreamparamsize)[0]
766 766 if paramssize < 0:
767 767 raise error.BundleValueError('negative bundle param size: %i'
768 768 % paramssize)
769 769 if paramssize:
770 770 params = self._readexact(paramssize)
771 771 params = self._processallparams(params)
772 772 return params
773 773
774 774 def _processallparams(self, paramsblock):
775 775 """"""
776 776 params = util.sortdict()
777 777 for p in paramsblock.split(' '):
778 778 p = p.split('=', 1)
779 779 p = [urlreq.unquote(i) for i in p]
780 780 if len(p) < 2:
781 781 p.append(None)
782 782 self._processparam(*p)
783 783 params[p[0]] = p[1]
784 784 return params
785 785
786 786
787 787 def _processparam(self, name, value):
788 788 """process a parameter, applying its effect if needed
789 789
790 790 Parameter starting with a lower case letter are advisory and will be
791 791 ignored when unknown. Those starting with an upper case letter are
792 792 mandatory and will this function will raise a KeyError when unknown.
793 793
794 794 Note: no option are currently supported. Any input will be either
795 795 ignored or failing.
796 796 """
797 797 if not name:
798 798 raise ValueError(r'empty parameter name')
799 799 if name[0:1] not in pycompat.bytestr(string.ascii_letters):
800 800 raise ValueError(r'non letter first character: %s' % name)
801 801 try:
802 802 handler = b2streamparamsmap[name.lower()]
803 803 except KeyError:
804 804 if name[0:1].islower():
805 805 indebug(self.ui, "ignoring unknown parameter %s" % name)
806 806 else:
807 807 raise error.BundleUnknownFeatureError(params=(name,))
808 808 else:
809 809 handler(self, name, value)
810 810
811 811 def _forwardchunks(self):
812 812 """utility to transfer a bundle2 as binary
813 813
814 814 This is made necessary by the fact the 'getbundle' command over 'ssh'
815 815 have no way to know then the reply end, relying on the bundle to be
816 816 interpreted to know its end. This is terrible and we are sorry, but we
817 817 needed to move forward to get general delta enabled.
818 818 """
819 819 yield self._magicstring
820 820 assert 'params' not in vars(self)
821 821 paramssize = self._unpack(_fstreamparamsize)[0]
822 822 if paramssize < 0:
823 823 raise error.BundleValueError('negative bundle param size: %i'
824 824 % paramssize)
825 825 yield _pack(_fstreamparamsize, paramssize)
826 826 if paramssize:
827 827 params = self._readexact(paramssize)
828 828 self._processallparams(params)
829 829 yield params
830 830 assert self._compengine.bundletype == 'UN'
831 831 # From there, payload might need to be decompressed
832 832 self._fp = self._compengine.decompressorreader(self._fp)
833 833 emptycount = 0
834 834 while emptycount < 2:
835 835 # so we can brainlessly loop
836 836 assert _fpartheadersize == _fpayloadsize
837 837 size = self._unpack(_fpartheadersize)[0]
838 838 yield _pack(_fpartheadersize, size)
839 839 if size:
840 840 emptycount = 0
841 841 else:
842 842 emptycount += 1
843 843 continue
844 844 if size == flaginterrupt:
845 845 continue
846 846 elif size < 0:
847 847 raise error.BundleValueError('negative chunk size: %i')
848 848 yield self._readexact(size)
849 849
850 850
851 851 def iterparts(self, seekable=False):
852 852 """yield all parts contained in the stream"""
853 853 cls = seekableunbundlepart if seekable else unbundlepart
854 854 # make sure param have been loaded
855 855 self.params
856 856 # From there, payload need to be decompressed
857 857 self._fp = self._compengine.decompressorreader(self._fp)
858 858 indebug(self.ui, 'start extraction of bundle2 parts')
859 859 headerblock = self._readpartheader()
860 860 while headerblock is not None:
861 861 part = cls(self.ui, headerblock, self._fp)
862 862 yield part
863 863 # Ensure part is fully consumed so we can start reading the next
864 864 # part.
865 865 part.consume()
866 866
867 867 headerblock = self._readpartheader()
868 868 indebug(self.ui, 'end of bundle2 stream')
869 869
870 870 def _readpartheader(self):
871 871 """reads a part header size and return the bytes blob
872 872
873 873 returns None if empty"""
874 874 headersize = self._unpack(_fpartheadersize)[0]
875 875 if headersize < 0:
876 876 raise error.BundleValueError('negative part header size: %i'
877 877 % headersize)
878 878 indebug(self.ui, 'part header size: %i' % headersize)
879 879 if headersize:
880 880 return self._readexact(headersize)
881 881 return None
882 882
883 883 def compressed(self):
884 884 self.params # load params
885 885 return self._compressed
886 886
887 887 def close(self):
888 888 """close underlying file"""
889 889 if util.safehasattr(self._fp, 'close'):
890 890 return self._fp.close()
891 891
# map of recognized bundle2 stream format versions to their unbundler class
formatmap = {'20': unbundle20}

# map of stream-level parameter names to their handler function, populated
# through the @b2streamparamhandler decorator below
b2streamparamsmap = {}
895 895
def b2streamparamhandler(name):
    """register a handler for a stream level parameter

    Returns a decorator that records ``func`` in ``b2streamparamsmap``
    under ``name``.
    """
    def decorator(func):
        # guard against registering the same stream parameter twice
        # (previously asserted against the unrelated 'formatmap')
        assert name not in b2streamparamsmap
        b2streamparamsmap[name] = func
        return func
    return decorator
903 903
@b2streamparamhandler('compression')
def processcompression(unbundler, param, value):
    """read compression parameter and install payload decompression"""
    supported = util.compengines.supportedbundletypes
    if value not in supported:
        raise error.BundleUnknownFeatureError(params=(param,),
                                              values=(value,))
    # install the matching engine; payload decompression happens lazily
    unbundler._compengine = util.compengines.forbundletype(value)
    if value is not None:
        unbundler._compressed = True
913 913
class bundlepart(object):
    """A bundle2 part contains application level payload

    The part `type` is used to route the part to the application level
    handler.

    The part payload is contained in ``part.data``. It could be raw bytes or a
    generator of byte chunks.

    You can add parameters to the part using the ``addparam`` method.
    Parameters can be either mandatory (default) or advisory. Remote side
    should be able to safely ignore the advisory ones.

    Both data and parameters cannot be modified after the generation has begun.
    """

    def __init__(self, parttype, mandatoryparams=(), advisoryparams=(),
                 data='', mandatory=True):
        validateparttype(parttype)
        self.id = None
        self.type = parttype
        self._data = data
        self._mandatoryparams = list(mandatoryparams)
        self._advisoryparams = list(advisoryparams)
        # checking for duplicated entries
        self._seenparams = set()
        for pname, __ in self._mandatoryparams + self._advisoryparams:
            if pname in self._seenparams:
                raise error.ProgrammingError('duplicated params: %s' % pname)
            self._seenparams.add(pname)
        # status of the part's generation:
        # - None: not started,
        # - False: currently generated,
        # - True: generation done.
        self._generated = None
        self.mandatory = mandatory

    def __repr__(self):
        cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
        return ('<%s object at %x; id: %s; type: %s; mandatory: %s>'
                % (cls, id(self), self.id, self.type, self.mandatory))

    def copy(self):
        """return a copy of the part

        The new part have the very same content but no partid assigned yet.
        Parts with generated data cannot be copied."""
        assert not util.safehasattr(self.data, 'next')
        return self.__class__(self.type, self._mandatoryparams,
                              self._advisoryparams, self._data, self.mandatory)

    # methods used to defines the part content
    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        if self._generated is not None:
            raise error.ReadOnlyPartError('part is being generated')
        self._data = data

    @property
    def mandatoryparams(self):
        # make it an immutable tuple to force people through ``addparam``
        return tuple(self._mandatoryparams)

    @property
    def advisoryparams(self):
        # make it an immutable tuple to force people through ``addparam``
        return tuple(self._advisoryparams)

    def addparam(self, name, value='', mandatory=True):
        """add a parameter to the part

        If 'mandatory' is set to True, the remote handler must claim support
        for this parameter or the unbundling will be aborted.

        The 'name' and 'value' cannot exceed 255 bytes each.
        """
        if self._generated is not None:
            raise error.ReadOnlyPartError('part is being generated')
        if name in self._seenparams:
            raise ValueError('duplicated params: %s' % name)
        self._seenparams.add(name)
        params = self._advisoryparams
        if mandatory:
            params = self._mandatoryparams
        params.append((name, value))

    # methods used to generates the bundle2 stream
    def getchunks(self, ui):
        """Generate the binary frames for this part (header then payload).

        May only be called once; raises ProgrammingError afterwards.
        """
        if self._generated is not None:
            raise error.ProgrammingError('part can only be consumed once')
        self._generated = False

        if ui.debugflag:
            msg = ['bundle2-output-part: "%s"' % self.type]
            if not self.mandatory:
                msg.append(' (advisory)')
            nbmp = len(self.mandatoryparams)
            nbap = len(self.advisoryparams)
            if nbmp or nbap:
                msg.append(' (params:')
                if nbmp:
                    msg.append(' %i mandatory' % nbmp)
                if nbap:
                    # fix: report the advisory count (was '% nbmp', which
                    # printed the mandatory count for the advisory field)
                    msg.append(' %i advisory' % nbap)
                msg.append(')')
            if not self.data:
                msg.append(' empty payload')
            elif (util.safehasattr(self.data, 'next')
                  or util.safehasattr(self.data, '__next__')):
                msg.append(' streamed payload')
            else:
                msg.append(' %i bytes payload' % len(self.data))
            msg.append('\n')
            ui.debug(''.join(msg))

        #### header
        # the mandatory bit is encoded in the case of the part type
        if self.mandatory:
            parttype = self.type.upper()
        else:
            parttype = self.type.lower()
        outdebug(ui, 'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
        ## parttype
        header = [_pack(_fparttypesize, len(parttype)),
                  parttype, _pack(_fpartid, self.id),
                  ]
        ## parameters
        # count
        manpar = self.mandatoryparams
        advpar = self.advisoryparams
        header.append(_pack(_fpartparamcount, len(manpar), len(advpar)))
        # size
        parsizes = []
        for key, value in manpar:
            parsizes.append(len(key))
            parsizes.append(len(value))
        for key, value in advpar:
            parsizes.append(len(key))
            parsizes.append(len(value))
        paramsizes = _pack(_makefpartparamsizes(len(parsizes) // 2), *parsizes)
        header.append(paramsizes)
        # key, value
        for key, value in manpar:
            header.append(key)
            header.append(value)
        for key, value in advpar:
            header.append(key)
            header.append(value)
        ## finalize header
        try:
            headerchunk = ''.join(header)
        except TypeError:
            raise TypeError(r'Found a non-bytes trying to '
                            r'build bundle part header: %r' % header)
        outdebug(ui, 'header chunk size: %i' % len(headerchunk))
        yield _pack(_fpartheadersize, len(headerchunk))
        yield headerchunk
        ## payload
        try:
            for chunk in self._payloadchunks():
                outdebug(ui, 'payload chunk size: %i' % len(chunk))
                yield _pack(_fpayloadsize, len(chunk))
                yield chunk
        except GeneratorExit:
            # GeneratorExit means that nobody is listening for our
            # results anyway, so just bail quickly rather than trying
            # to produce an error part.
            ui.debug('bundle2-generatorexit\n')
            raise
        except BaseException as exc:
            bexc = util.forcebytestr(exc)
            # backup exception data for later
            ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
                     % bexc)
            tb = sys.exc_info()[2]
            msg = 'unexpected error: %s' % bexc
            interpart = bundlepart('error:abort', [('message', msg)],
                                   mandatory=False)
            interpart.id = 0
            # signal an interruption and embed the error part in the stream
            yield _pack(_fpayloadsize, -1)
            for chunk in interpart.getchunks(ui=ui):
                yield chunk
            outdebug(ui, 'closing payload chunk')
            # abort current part payload
            yield _pack(_fpayloadsize, 0)
            pycompat.raisewithtb(exc, tb)
        # end of payload
        outdebug(ui, 'closing payload chunk')
        yield _pack(_fpayloadsize, 0)
        self._generated = True

    def _payloadchunks(self):
        """yield chunks of a the part payload

        Exists to handle the different methods to provide data to a part."""
        # we only support fixed size data now.
        # This will be improved in the future.
        if (util.safehasattr(self.data, 'next')
            or util.safehasattr(self.data, '__next__')):
            buff = util.chunkbuffer(self.data)
            chunk = buff.read(preferedchunksize)
            while chunk:
                yield chunk
                chunk = buff.read(preferedchunksize)
        elif len(self.data):
            yield self.data
1123 1123
1124 1124
1125 1125 flaginterrupt = -1
1126 1126
class interrupthandler(unpackermixin):
    """read one part and process it with restricted capability

    This allows to transmit exception raised on the producer size during part
    iteration while the consumer is reading a part.

    Part processed in this manner only have access to a ui object,"""

    def __init__(self, ui, fp):
        super(interrupthandler, self).__init__(fp)
        self.ui = ui

    def _readpartheader(self):
        """reads a part header size and return the bytes blob

        returns None if empty"""
        # same framing as unbundle20._readpartheader; duplicated here because
        # this class only carries a ui and a file pointer
        headersize = self._unpack(_fpartheadersize)[0]
        if headersize < 0:
            raise error.BundleValueError('negative part header size: %i'
                                         % headersize)
        indebug(self.ui, 'part header size: %i\n' % headersize)
        if headersize:
            return self._readexact(headersize)
        return None

    def __call__(self):
        # Invoked when a flaginterrupt frame appears in the middle of a part
        # payload: read the out-of-band part (if any) and process it with a
        # restricted operation object that has no repository access.
        self.ui.debug('bundle2-input-stream-interrupt:'
                      ' opening out of band context\n')
        indebug(self.ui, 'bundle2 stream interruption, looking for a part.')
        headerblock = self._readpartheader()
        if headerblock is None:
            indebug(self.ui, 'no part found during interruption.')
            return
        part = unbundlepart(self.ui, headerblock, self._fp)
        op = interruptoperation(self.ui)
        hardabort = False
        try:
            _processpart(op, part)
        except (SystemExit, KeyboardInterrupt):
            # on hard abort, re-raise immediately and skip draining the part
            hardabort = True
            raise
        finally:
            if not hardabort:
                part.consume()
        self.ui.debug('bundle2-input-stream-interrupt:'
                      ' closing out of band context\n')
1174 1174
class interruptoperation(object):
    """Restricted stand-in for a bundle operation during stream interruption.

    Part handlers invoked out-of-band only get access to a ui object; any
    attempt to reach the repository or open a transaction is an error.
    """

    def __init__(self, ui):
        self.captureoutput = False
        self.reply = None
        self.ui = ui

    @property
    def repo(self):
        raise error.ProgrammingError('no repo access from stream interruption')

    def gettransaction(self):
        raise TransactionUnavailable('no repo access from stream interruption')
1192 1192
def decodepayloadchunks(ui, fh):
    """Reads bundle2 part payload data into chunks.

    Part payload data consists of framed chunks. This function takes
    a file handle and emits those chunks.
    """
    dolog = ui.configbool('devel', 'bundle2.debug')
    debug = ui.debug

    headerstruct = struct.Struct(_fpayloadsize)
    headersize = headerstruct.size
    unpack = headerstruct.unpack

    readexactly = changegroup.readexactly
    read = fh.read

    chunksize = unpack(readexactly(fh, headersize))[0]
    indebug(ui, 'payload chunk size: %i' % chunksize)

    # changegroup.readexactly() is inlined below for performance.
    while chunksize:
        if chunksize >= 0:
            s = read(chunksize)
            if len(s) < chunksize:
                raise error.Abort(_('stream ended unexpectedly '
                                    ' (got %d bytes, expected %d)') %
                                  (len(s), chunksize))

            yield s
        elif chunksize == flaginterrupt:
            # Interrupt "signal" detected. The regular stream is interrupted
            # and a bundle2 part follows. Consume it.
            interrupthandler(ui, fh)()
        else:
            raise error.BundleValueError(
                'negative payload chunk size: %s' % chunksize)

        s = read(headersize)
        if len(s) < headersize:
            # fix: this read fetches the size frame of the next chunk, so
            # the expected byte count is headersize, not the previous
            # payload's chunksize
            raise error.Abort(_('stream ended unexpectedly '
                                ' (got %d bytes, expected %d)') %
                              (len(s), headersize))

        chunksize = unpack(s)[0]

        # indebug() inlined for performance.
        if dolog:
            debug('bundle2-input: payload chunk size: %i\n' % chunksize)
1241 1241
class unbundlepart(unpackermixin):
    """a bundle part read from a bundle"""

    def __init__(self, ui, header, fp):
        super(unbundlepart, self).__init__(fp)
        # seeking is optional; plain pipes and sockets are supported too
        self._seekable = (util.safehasattr(fp, 'seek') and
                          util.safehasattr(fp, 'tell'))
        self.ui = ui
        # unbundle state attr
        self._headerdata = header
        self._headeroffset = 0
        self._initialized = False
        self.consumed = False
        # part data
        self.id = None
        self.type = None
        self.mandatoryparams = None
        self.advisoryparams = None
        self.params = None
        self.mandatorykeys = ()
        self._readheader()
        self._mandatory = None
        self._pos = 0

    def _fromheader(self, size):
        """return the next <size> byte from the header"""
        offset = self._headeroffset
        data = self._headerdata[offset:(offset + size)]
        self._headeroffset = offset + size
        return data

    def _unpackheader(self, format):
        """read given format from header

        This automatically compute the size of the format to read."""
        data = self._fromheader(struct.calcsize(format))
        return _unpack(format, data)

    def _initparams(self, mandatoryparams, advisoryparams):
        """internal function to setup all logic related parameters"""
        # make it read only to prevent people touching it by mistake.
        self.mandatoryparams = tuple(mandatoryparams)
        self.advisoryparams = tuple(advisoryparams)
        # user friendly UI
        self.params = util.sortdict(self.mandatoryparams)
        self.params.update(self.advisoryparams)
        self.mandatorykeys = frozenset(p[0] for p in mandatoryparams)

    def _readheader(self):
        """read the header and setup the object"""
        typesize = self._unpackheader(_fparttypesize)[0]
        self.type = self._fromheader(typesize)
        indebug(self.ui, 'part type: "%s"' % self.type)
        self.id = self._unpackheader(_fpartid)[0]
        indebug(self.ui, 'part id: "%s"' % pycompat.bytestr(self.id))
        # extract mandatory bit from type
        self.mandatory = (self.type != self.type.lower())
        self.type = self.type.lower()
        ## reading parameters
        # param count
        mancount, advcount = self._unpackheader(_fpartparamcount)
        indebug(self.ui, 'part parameters: %i' % (mancount + advcount))
        # param size
        fparamsizes = _makefpartparamsizes(mancount + advcount)
        paramsizes = self._unpackheader(fparamsizes)
        # make it a list of couple again
        paramsizes = list(zip(paramsizes[::2], paramsizes[1::2]))
        # split mandatory from advisory
        mansizes = paramsizes[:mancount]
        advsizes = paramsizes[mancount:]
        # retrieve param value
        manparams = []
        for key, value in mansizes:
            manparams.append((self._fromheader(key), self._fromheader(value)))
        advparams = []
        for key, value in advsizes:
            advparams.append((self._fromheader(key), self._fromheader(value)))
        self._initparams(manparams, advparams)
        ## part payload
        self._payloadstream = util.chunkbuffer(self._payloadchunks())
        # we read the data, tell it
        self._initialized = True

    def _payloadchunks(self):
        """Generator of decoded chunks in the payload."""
        return decodepayloadchunks(self.ui, self._fp)

    def consume(self):
        """Read the part payload until completion.

        By consuming the part data, the underlying stream read offset will
        be advanced to the next part (or end of stream).
        """
        if self.consumed:
            return

        chunk = self.read(32768)
        while chunk:
            # NOTE(review): read() already advances self._pos, so _pos may be
            # double-counted here — confirm against callers relying on _pos
            self._pos += len(chunk)
            chunk = self.read(32768)

    def read(self, size=None):
        """read payload data"""
        if not self._initialized:
            self._readheader()
        if size is None:
            data = self._payloadstream.read()
        else:
            data = self._payloadstream.read(size)
        self._pos += len(data)
        if size is None or len(data) < size:
            # a short read means the payload is exhausted
            if not self.consumed and self._pos:
                self.ui.debug('bundle2-input-part: total payload size %i\n'
                              % self._pos)
            self.consumed = True
        return data
1358 1358
class seekableunbundlepart(unbundlepart):
    """A bundle2 part in a bundle that is seekable.

    Regular ``unbundlepart`` instances can only be read once. This class
    extends ``unbundlepart`` to enable bi-directional seeking within the
    part.

    Bundle2 part data consists of framed chunks. Offsets when seeking
    refer to the decoded data, not the offsets in the underlying bundle2
    stream.

    To facilitate quickly seeking within the decoded data, instances of this
    class maintain a mapping between offsets in the underlying stream and
    the decoded payload. This mapping will consume memory in proportion
    to the number of chunks within the payload (which almost certainly
    increases in proportion with the size of the part).
    """
    def __init__(self, ui, header, fp):
        # (payload, file) offsets for chunk starts.
        self._chunkindex = []

        super(seekableunbundlepart, self).__init__(ui, header, fp)

    def _payloadchunks(self, chunknum=0):
        '''seek to specified chunk and start yielding data'''
        if len(self._chunkindex) == 0:
            assert chunknum == 0, 'Must start with chunk 0'
            self._chunkindex.append((0, self._tellfp()))
        else:
            assert chunknum < len(self._chunkindex), \
                   'Unknown chunk %d' % chunknum
            self._seekfp(self._chunkindex[chunknum][1])

        pos = self._chunkindex[chunknum][0]

        for chunk in decodepayloadchunks(self.ui, self._fp):
            chunknum += 1
            pos += len(chunk)
            # record the (payload, file) offset of each newly seen chunk
            if chunknum == len(self._chunkindex):
                self._chunkindex.append((pos, self._tellfp()))

            yield chunk

    def _findchunk(self, pos):
        '''for a given payload position, return a chunk number and offset'''
        for chunk, (ppos, fpos) in enumerate(self._chunkindex):
            if ppos == pos:
                return chunk, 0
            elif ppos > pos:
                return chunk - 1, pos - self._chunkindex[chunk - 1][0]
        raise ValueError('Unknown chunk')

    def tell(self):
        return self._pos

    def seek(self, offset, whence=os.SEEK_SET):
        if whence == os.SEEK_SET:
            newpos = offset
        elif whence == os.SEEK_CUR:
            newpos = self._pos + offset
        elif whence == os.SEEK_END:
            if not self.consumed:
                # Can't use self.consume() here because it advances self._pos.
                chunk = self.read(32768)
                while chunk:
                    chunk = self.read(32768)
            newpos = self._chunkindex[-1][0] - offset
        else:
            raise ValueError('Unknown whence value: %r' % (whence,))

        if newpos > self._chunkindex[-1][0] and not self.consumed:
            # Can't use self.consume() here because it advances self._pos.
            chunk = self.read(32768)
            while chunk:
                # fix: was self.read(32668) — a typo inconsistent with the
                # 32768 read size used everywhere else in this module
                chunk = self.read(32768)

        if not 0 <= newpos <= self._chunkindex[-1][0]:
            raise ValueError('Offset out of range')

        if self._pos != newpos:
            chunk, internaloffset = self._findchunk(newpos)
            self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
            adjust = self.read(internaloffset)
            if len(adjust) != internaloffset:
                raise error.Abort(_('Seek failed\n'))
            self._pos = newpos

    def _seekfp(self, offset, whence=0):
        """move the underlying file pointer

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        if self._seekable:
            return self._fp.seek(offset, whence)
        else:
            raise NotImplementedError(_('File pointer is not seekable'))

    def _tellfp(self):
        """return the file offset, or None if file is not seekable

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        if self._seekable:
            try:
                return self._fp.tell()
            except IOError as e:
                if e.errno == errno.ESPIPE:
                    # the stream turned out to be a pipe; remember that and
                    # stop trying to seek/tell on it
                    self._seekable = False
                else:
                    raise
        return None
1476 1476
# These are only the static capabilities.
# Check the 'getrepocaps' function for the rest.
capabilities = {'HG20': (),
                'bookmarks': (),
                'error': ('abort', 'unsupportedcontent', 'pushraced',
                          'pushkey'),
                'listkeys': (),
                'pushkey': (),
                # digest algorithms available for payload validation
                'digests': tuple(sorted(util.DIGESTS.keys())),
                # transports supported for 'remote-changegroup' parts
                'remote-changegroup': ('http', 'https'),
                'hgtagsfnodes': (),
                'phases': ('heads',),
               }
1490 1490
def getrepocaps(repo, allowpushback=False):
    """return the bundle2 capabilities for a given repo

    Exists to allow extensions (like evolution) to mutate the capabilities.
    """
    caps = capabilities.copy()
    cgversions = sorted(changegroup.supportedincomingversions(repo))
    caps['changegroup'] = tuple(cgversions)
    if obsolete.isenabled(repo, obsolete.exchangeopt):
        # advertise every obsmarker format version we can emit
        caps['obsmarkers'] = tuple('V%i' % v for v in obsolete.formats)
    if allowpushback:
        caps['pushback'] = ()
    if repo.ui.config('server', 'concurrent-push-mode') == 'check-related':
        caps['checkheads'] = ('related',)
    if 'phases' in repo.ui.configlist('devel', 'legacy.exchange'):
        del caps['phases']
    return caps
1510 1510
def bundle2caps(remote):
    """return the bundle capabilities of a peer as dict

    Returns an empty dict when the peer does not advertise bundle2 at all.
    """
    raw = remote.capable('bundle2')
    if not raw and raw != '':
        return {}
    # reuse the value already fetched instead of issuing a second,
    # redundant capability lookup against the peer
    capsblob = urlreq.unquote(raw)
    return decodecaps(capsblob)
1518 1518
def obsmarkersversion(caps):
    """extract the list of supported obsmarkers versions from a bundle2caps dict
    """
    versions = caps.get('obsmarkers', ())
    # entries look like 'V1', 'V2', ...; keep only well-formed ones
    return [int(entry[1:]) for entry in versions if entry.startswith('V')]
1524 1524
def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
                   vfs=None, compression=None, compopts=None):
    """Write a bundle of `outgoing` to `filename`, dispatching on type.

    'HG10*' types produce a legacy v1 bundle; 'HG20' produces a bundle2
    file whose parts are controlled by `opts`.
    """
    if bundletype.startswith('HG10'):
        cg = changegroup.makechangegroup(repo, outgoing, '01', source)
        return writebundle(ui, cg, filename, bundletype, vfs=vfs,
                           compression=compression, compopts=compopts)
    if not bundletype.startswith('HG20'):
        raise error.ProgrammingError('unknown bundle type: %s' % bundletype)

    caps = {}
    if 'obsolescence' in opts:
        caps['obsmarkers'] = ('V1',)
    bundler = bundle20(ui, caps)
    bundler.setcompression(compression, compopts)
    _addpartsfromopts(ui, repo, bundler, source, outgoing, opts)

    return changegroup.writechunks(ui, bundler.getchunks(), filename, vfs=vfs)
1543 1543
def _addpartsfromopts(ui, repo, bundler, source, outgoing, opts):
    """Add the parts requested by `opts` (changegroup, tags fnodes,
    obsmarkers, phases) to `bundler`."""
    # We should eventually reconcile this logic with the one behind
    # 'exchange.getbundle2partsgenerator'.
    #
    # The type of input from 'getbundle' and 'writenewbundle' are a bit
    # different right now. So we keep them separated for now for the sake of
    # simplicity.

    # we always want a changegroup in such bundle
    cgversion = opts.get('cg.version')
    if cgversion is None:
        cgversion = changegroup.safeversion(repo)
    cg = changegroup.makechangegroup(repo, outgoing, cgversion, source)
    part = bundler.newpart('changegroup', data=cg.getchunks())
    part.addparam('version', cg.version)
    if 'clcount' in cg.extras:
        part.addparam('nbchanges', '%d' % cg.extras['clcount'],
                      mandatory=False)
    # mark everything secret when any outgoing head is secret locally
    if opts.get('phases') and repo.revs('%ln and secret()',
                                        outgoing.missingheads):
        part.addparam('targetphase', '%d' % phases.secret, mandatory=False)

    addparttagsfnodescache(repo, bundler, outgoing)

    if opts.get('obsolescence', False):
        obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
        buildobsmarkerspart(bundler, obsmarkers)

    if opts.get('phases', False):
        headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
        phasedata = phases.binaryencode(headsbyphase)
        bundler.newpart('phase-heads', data=phasedata)
1576 1576
def addparttagsfnodescache(repo, bundler, outgoing):
    """Add an optional 'hgtagsfnodes' part carrying the cached .hgtags
    filenode for each outgoing head, when known."""
    cache = tags.hgtagsfnodescache(repo.unfiltered())

    # .hgtags fnodes are only relevant for head changesets. While we could
    # transfer values for all known nodes, there will likely be little to
    # no benefit.
    #
    # We don't bother using a generator to produce output data because
    # a) we only have 40 bytes per head and even esoteric numbers of heads
    # consume little memory (1M heads is 40MB) b) we don't want to send the
    # part if we don't have entries and knowing if we have entries requires
    # cache lookups.
    chunks = []
    for node in outgoing.missingheads:
        # Don't compute missing, as this may slow down serving.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            continue
        chunks.append(node)
        chunks.append(fnode)

    if chunks:
        bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1600 1600
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if not markers:
        return None

    # pick the highest obsmarker format version both sides understand
    supported = obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(supported)
    if version is None:
        raise ValueError('bundler does not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('obsmarkers', data=stream)
1616 1616
def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None,
                compopts=None):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """

    if bundletype == "HG20":
        bundle = bundle20(ui)
        bundle.setcompression(compression, compopts)
        part = bundle.newpart('changegroup', data=cg.getchunks())
        part.addparam('version', cg.version)
        if 'clcount' in cg.extras:
            part.addparam('nbchanges', '%d' % cg.extras['clcount'],
                          mandatory=False)
        chunkiter = bundle.getchunks()
    else:
        # compression argument is only for the bundle2 case
        assert compression is None
        if cg.version != '01':
            raise error.Abort(_('old bundle types only supports v1 '
                                'changegroups'))
        # legacy v1 bundles: a magic header followed by the compressed
        # changegroup stream
        header, comp = bundletypes[bundletype]
        if comp not in util.compengines.supportedbundletypes:
            raise error.Abort(_('unknown stream compression type: %s')
                              % comp)
        compengine = util.compengines.forbundletype(comp)
        def chunkiter():
            yield header
            for chunk in compengine.compressstream(cg.getchunks(), compopts):
                yield chunk
        chunkiter = chunkiter()

    # parse the changegroup data, otherwise we will block
    # in case of sshrepo because we don't know the end of the stream
    return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
1656 1656
def combinechangegroupresults(op):
    """Combine 0 or more addchangegroup return values into a single one.

    Return codes follow the addchangegroup convention: 1 means success with
    no head change, 0 means failure, values above 1 / below -1 encode the
    number of added / removed heads.
    """
    changedheads = 0
    result = 1
    for record in op.records['changegroup']:
        ret = record.get('return', 0)
        # If any changegroup result is 0, return 0
        if ret == 0:
            result = 0
            break
        if ret < -1:
            changedheads += ret + 1
        elif ret > 1:
            changedheads += ret - 1
    if changedheads > 0:
        result = 1 + changedheads
    elif changedheads < 0:
        result = -1 + changedheads
    return result
1677 1677
@parthandler('changegroup', ('version', 'nbchanges', 'treemanifest',
                             'targetphase'))
def handlechangegroup(op, inpart):
    """apply a changegroup part on the repo

    This is a very early implementation that will massive rework before being
    inflicted to any end-user.
    """
    # a transaction is required to apply the changegroup
    tr = op.gettransaction()
    unpackerversion = inpart.params.get('version', '01')
    # We should raise an appropriate exception here
    cg = changegroup.getunbundler(unpackerversion, inpart, None)
    # the source and url passed here are overwritten by the one contained in
    # the transaction.hookargs argument. So 'bundle2' is a placeholder
    nbchangesets = None
    if 'nbchanges' in inpart.params:
        nbchangesets = int(inpart.params.get('nbchanges'))
    if ('treemanifest' in inpart.params and
        'treemanifest' not in op.repo.requirements):
        # a tree-manifest bundle may only seed an empty local repo; when it
        # does, upgrade the repo requirements accordingly
        if len(op.repo.changelog) != 0:
            raise error.Abort(_(
                "bundle contains tree manifests, but local repo is "
                "non-empty and does not use tree manifests"))
        op.repo.requirements.add('treemanifest')
        op.repo._applyopenerreqs()
        op.repo._writerequirements()
    extrakwargs = {}
    targetphase = inpart.params.get('targetphase')
    if targetphase is not None:
        extrakwargs['targetphase'] = int(targetphase)
    ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2',
                              expectedtotal=nbchangesets, **extrakwargs)
    if op.reply is not None:
        # This is definitely not the final form of this
        # return. But one need to start somewhere.
        part = op.reply.newpart('reply:changegroup', mandatory=False)
        part.addparam(
            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
        part.addparam('return', '%i' % ret, mandatory=False)
    # the unbundler must have consumed the whole part at this point
    assert not inpart.read()
1718 1718
1719 1719 _remotechangegroupparams = tuple(['url', 'size', 'digests'] +
1720 1720 ['digest:%s' % k for k in util.DIGESTS.keys()])
1721 1721 @parthandler('remote-changegroup', _remotechangegroupparams)
1722 1722 def handleremotechangegroup(op, inpart):
1723 1723 """apply a bundle10 on the repo, given an url and validation information
1724 1724
1725 1725 All the information about the remote bundle to import are given as
1726 1726 parameters. The parameters include:
1727 1727 - url: the url to the bundle10.
1728 1728 - size: the bundle10 file size. It is used to validate what was
1729 1729 retrieved by the client matches the server knowledge about the bundle.
1730 1730 - digests: a space separated list of the digest types provided as
1731 1731 parameters.
1732 1732 - digest:<digest-type>: the hexadecimal representation of the digest with
1733 1733 that name. Like the size, it is used to validate what was retrieved by
1734 1734 the client matches what the server knows about the bundle.
1735 1735
1736 1736 When multiple digest types are given, all of them are checked.
1737 1737 """
1738 1738 try:
1739 1739 raw_url = inpart.params['url']
1740 1740 except KeyError:
1741 1741 raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
1742 1742 parsed_url = util.url(raw_url)
1743 1743 if parsed_url.scheme not in capabilities['remote-changegroup']:
1744 1744 raise error.Abort(_('remote-changegroup does not support %s urls') %
1745 1745 parsed_url.scheme)
1746 1746
1747 1747 try:
1748 1748 size = int(inpart.params['size'])
1749 1749 except ValueError:
1750 1750 raise error.Abort(_('remote-changegroup: invalid value for param "%s"')
1751 1751 % 'size')
1752 1752 except KeyError:
1753 1753 raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')
1754 1754
1755 1755 digests = {}
1756 1756 for typ in inpart.params.get('digests', '').split():
1757 1757 param = 'digest:%s' % typ
1758 1758 try:
1759 1759 value = inpart.params[param]
1760 1760 except KeyError:
1761 1761 raise error.Abort(_('remote-changegroup: missing "%s" param') %
1762 1762 param)
1763 1763 digests[typ] = value
1764 1764
1765 1765 real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)
1766 1766
1767 1767 tr = op.gettransaction()
1768 1768 from . import exchange
1769 1769 cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
1770 1770 if not isinstance(cg, changegroup.cg1unpacker):
1771 1771 raise error.Abort(_('%s: not a bundle version 1.0') %
1772 1772 util.hidepassword(raw_url))
1773 1773 ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2')
1774 1774 if op.reply is not None:
1775 1775 # This is definitely not the final form of this
1776 1776 # return. But one need to start somewhere.
1777 1777 part = op.reply.newpart('reply:changegroup')
1778 1778 part.addparam(
1779 1779 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
1780 1780 part.addparam('return', '%i' % ret, mandatory=False)
1781 1781 try:
1782 1782 real_part.validate()
1783 1783 except error.Abort as e:
1784 1784 raise error.Abort(_('bundle at %s is corrupted:\n%s') %
1785 1785 (util.hidepassword(raw_url), str(e)))
1786 1786 assert not inpart.read()
1787 1787
1788 1788 @parthandler('reply:changegroup', ('return', 'in-reply-to'))
1789 1789 def handlereplychangegroup(op, inpart):
1790 1790 ret = int(inpart.params['return'])
1791 1791 replyto = int(inpart.params['in-reply-to'])
1792 1792 op.records.add('changegroup', {'return': ret}, replyto)
1793 1793
1794 1794 @parthandler('check:bookmarks')
1795 1795 def handlecheckbookmarks(op, inpart):
1796 1796 """check location of bookmarks
1797 1797
1798 1798 This part is to be used to detect push race regarding bookmark, it
1799 1799 contains binary encoded (bookmark, node) tuple. If the local state does
1800 1800 not marks the one in the part, a PushRaced exception is raised
1801 1801 """
1802 1802 bookdata = bookmarks.binarydecode(inpart)
1803 1803
1804 1804 msgstandard = ('repository changed while pushing - please try again '
1805 1805 '(bookmark "%s" move from %s to %s)')
1806 1806 msgmissing = ('repository changed while pushing - please try again '
1807 1807 '(bookmark "%s" is missing, expected %s)')
1808 1808 msgexist = ('repository changed while pushing - please try again '
1809 1809 '(bookmark "%s" set on %s, expected missing)')
1810 1810 for book, node in bookdata:
1811 1811 currentnode = op.repo._bookmarks.get(book)
1812 1812 if currentnode != node:
1813 1813 if node is None:
1814 1814 finalmsg = msgexist % (book, nodemod.short(currentnode))
1815 1815 elif currentnode is None:
1816 1816 finalmsg = msgmissing % (book, nodemod.short(node))
1817 1817 else:
1818 1818 finalmsg = msgstandard % (book, nodemod.short(node),
1819 1819 nodemod.short(currentnode))
1820 1820 raise error.PushRaced(finalmsg)
1821 1821
1822 1822 @parthandler('check:heads')
1823 1823 def handlecheckheads(op, inpart):
1824 1824 """check that head of the repo did not change
1825 1825
1826 1826 This is used to detect a push race when using unbundle.
1827 1827 This replaces the "heads" argument of unbundle."""
1828 1828 h = inpart.read(20)
1829 1829 heads = []
1830 1830 while len(h) == 20:
1831 1831 heads.append(h)
1832 1832 h = inpart.read(20)
1833 1833 assert not h
1834 1834 # Trigger a transaction so that we are guaranteed to have the lock now.
1835 1835 if op.ui.configbool('experimental', 'bundle2lazylocking'):
1836 1836 op.gettransaction()
1837 1837 if sorted(heads) != sorted(op.repo.heads()):
1838 1838 raise error.PushRaced('repository changed while pushing - '
1839 1839 'please try again')
1840 1840
1841 1841 @parthandler('check:updated-heads')
1842 1842 def handlecheckupdatedheads(op, inpart):
1843 1843 """check for race on the heads touched by a push
1844 1844
1845 1845 This is similar to 'check:heads' but focus on the heads actually updated
1846 1846 during the push. If other activities happen on unrelated heads, it is
1847 1847 ignored.
1848 1848
1849 1849 This allow server with high traffic to avoid push contention as long as
1850 1850 unrelated parts of the graph are involved."""
1851 1851 h = inpart.read(20)
1852 1852 heads = []
1853 1853 while len(h) == 20:
1854 1854 heads.append(h)
1855 1855 h = inpart.read(20)
1856 1856 assert not h
1857 1857 # trigger a transaction so that we are guaranteed to have the lock now.
1858 1858 if op.ui.configbool('experimental', 'bundle2lazylocking'):
1859 1859 op.gettransaction()
1860 1860
1861 1861 currentheads = set()
1862 1862 for ls in op.repo.branchmap().itervalues():
1863 1863 currentheads.update(ls)
1864 1864
1865 1865 for h in heads:
1866 1866 if h not in currentheads:
1867 1867 raise error.PushRaced('repository changed while pushing - '
1868 1868 'please try again')
1869 1869
1870 1870 @parthandler('check:phases')
1871 1871 def handlecheckphases(op, inpart):
1872 1872 """check that phase boundaries of the repository did not change
1873 1873
1874 1874 This is used to detect a push race.
1875 1875 """
1876 1876 phasetonodes = phases.binarydecode(inpart)
1877 1877 unfi = op.repo.unfiltered()
1878 1878 cl = unfi.changelog
1879 1879 phasecache = unfi._phasecache
1880 1880 msg = ('repository changed while pushing - please try again '
1881 1881 '(%s is %s expected %s)')
1882 1882 for expectedphase, nodes in enumerate(phasetonodes):
1883 1883 for n in nodes:
1884 1884 actualphase = phasecache.phase(unfi, cl.rev(n))
1885 1885 if actualphase != expectedphase:
1886 1886 finalmsg = msg % (nodemod.short(n),
1887 1887 phases.phasenames[actualphase],
1888 1888 phases.phasenames[expectedphase])
1889 1889 raise error.PushRaced(finalmsg)
1890 1890
1891 1891 @parthandler('output')
1892 1892 def handleoutput(op, inpart):
1893 1893 """forward output captured on the server to the client"""
1894 1894 for line in inpart.read().splitlines():
1895 1895 op.ui.status(_('remote: %s\n') % line)
1896 1896
1897 1897 @parthandler('replycaps')
1898 1898 def handlereplycaps(op, inpart):
1899 1899 """Notify that a reply bundle should be created
1900 1900
1901 1901 The payload contains the capabilities information for the reply"""
1902 1902 caps = decodecaps(inpart.read())
1903 1903 if op.reply is None:
1904 1904 op.reply = bundle20(op.ui, caps)
1905 1905
1906 1906 class AbortFromPart(error.Abort):
1907 1907 """Sub-class of Abort that denotes an error from a bundle2 part."""
1908 1908
1909 1909 @parthandler('error:abort', ('message', 'hint'))
1910 1910 def handleerrorabort(op, inpart):
1911 1911 """Used to transmit abort error over the wire"""
1912 1912 raise AbortFromPart(inpart.params['message'],
1913 1913 hint=inpart.params.get('hint'))
1914 1914
1915 1915 @parthandler('error:pushkey', ('namespace', 'key', 'new', 'old', 'ret',
1916 1916 'in-reply-to'))
1917 1917 def handleerrorpushkey(op, inpart):
1918 1918 """Used to transmit failure of a mandatory pushkey over the wire"""
1919 1919 kwargs = {}
1920 1920 for name in ('namespace', 'key', 'new', 'old', 'ret'):
1921 1921 value = inpart.params.get(name)
1922 1922 if value is not None:
1923 1923 kwargs[name] = value
1924 1924 raise error.PushkeyFailed(inpart.params['in-reply-to'], **kwargs)
1925 1925
1926 1926 @parthandler('error:unsupportedcontent', ('parttype', 'params'))
1927 1927 def handleerrorunsupportedcontent(op, inpart):
1928 1928 """Used to transmit unknown content error over the wire"""
1929 1929 kwargs = {}
1930 1930 parttype = inpart.params.get('parttype')
1931 1931 if parttype is not None:
1932 1932 kwargs['parttype'] = parttype
1933 1933 params = inpart.params.get('params')
1934 1934 if params is not None:
1935 1935 kwargs['params'] = params.split('\0')
1936 1936
1937 1937 raise error.BundleUnknownFeatureError(**kwargs)
1938 1938
1939 1939 @parthandler('error:pushraced', ('message',))
1940 1940 def handleerrorpushraced(op, inpart):
1941 1941 """Used to transmit push race error over the wire"""
1942 1942 raise error.ResponseError(_('push failed:'), inpart.params['message'])
1943 1943
1944 1944 @parthandler('listkeys', ('namespace',))
1945 1945 def handlelistkeys(op, inpart):
1946 1946 """retrieve pushkey namespace content stored in a bundle2"""
1947 1947 namespace = inpart.params['namespace']
1948 1948 r = pushkey.decodekeys(inpart.read())
1949 1949 op.records.add('listkeys', (namespace, r))
1950 1950
1951 1951 @parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
1952 1952 def handlepushkey(op, inpart):
1953 1953 """process a pushkey request"""
1954 1954 dec = pushkey.decode
1955 1955 namespace = dec(inpart.params['namespace'])
1956 1956 key = dec(inpart.params['key'])
1957 1957 old = dec(inpart.params['old'])
1958 1958 new = dec(inpart.params['new'])
1959 1959 # Grab the transaction to ensure that we have the lock before performing the
1960 1960 # pushkey.
1961 1961 if op.ui.configbool('experimental', 'bundle2lazylocking'):
1962 1962 op.gettransaction()
1963 1963 ret = op.repo.pushkey(namespace, key, old, new)
1964 1964 record = {'namespace': namespace,
1965 1965 'key': key,
1966 1966 'old': old,
1967 1967 'new': new}
1968 1968 op.records.add('pushkey', record)
1969 1969 if op.reply is not None:
1970 1970 rpart = op.reply.newpart('reply:pushkey')
1971 1971 rpart.addparam(
1972 1972 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
1973 1973 rpart.addparam('return', '%i' % ret, mandatory=False)
1974 1974 if inpart.mandatory and not ret:
1975 1975 kwargs = {}
1976 1976 for key in ('namespace', 'key', 'new', 'old', 'ret'):
1977 1977 if key in inpart.params:
1978 1978 kwargs[key] = inpart.params[key]
1979 1979 raise error.PushkeyFailed(partid=str(inpart.id), **kwargs)
1980 1980
1981 1981 @parthandler('bookmarks')
1982 1982 def handlebookmark(op, inpart):
1983 1983 """transmit bookmark information
1984 1984
1985 1985 The part contains binary encoded bookmark information.
1986 1986
1987 1987 The exact behavior of this part can be controlled by the 'bookmarks' mode
1988 1988 on the bundle operation.
1989 1989
1990 1990 When mode is 'apply' (the default) the bookmark information is applied as
1991 1991 is to the unbundling repository. Make sure a 'check:bookmarks' part is
1992 1992 issued earlier to check for push races in such update. This behavior is
1993 1993 suitable for pushing.
1994 1994
1995 1995 When mode is 'records', the information is recorded into the 'bookmarks'
1996 1996 records of the bundle operation. This behavior is suitable for pulling.
1997 1997 """
1998 1998 changes = bookmarks.binarydecode(inpart)
1999 1999
2000 2000 pushkeycompat = op.repo.ui.configbool('server', 'bookmarks-pushkey-compat')
2001 2001 bookmarksmode = op.modes.get('bookmarks', 'apply')
2002 2002
2003 2003 if bookmarksmode == 'apply':
2004 2004 tr = op.gettransaction()
2005 2005 bookstore = op.repo._bookmarks
2006 2006 if pushkeycompat:
2007 2007 allhooks = []
2008 2008 for book, node in changes:
2009 2009 hookargs = tr.hookargs.copy()
2010 2010 hookargs['pushkeycompat'] = '1'
2011 2011 hookargs['namespace'] = 'bookmark'
2012 2012 hookargs['key'] = book
2013 2013 hookargs['old'] = nodemod.hex(bookstore.get(book, ''))
2014 2014 hookargs['new'] = nodemod.hex(node if node is not None else '')
2015 2015 allhooks.append(hookargs)
2016 2016
2017 2017 for hookargs in allhooks:
2018 2018 op.repo.hook('prepushkey', throw=True, **hookargs)
2019 2019
2020 2020 bookstore.applychanges(op.repo, op.gettransaction(), changes)
2021 2021
2022 2022 if pushkeycompat:
2023 2023 def runhook():
2024 2024 for hookargs in allhooks:
2025 2025 op.repo.hook('prepushkey', **hookargs)
2026 2026 op.repo._afterlock(runhook)
2027 2027
2028 2028 elif bookmarksmode == 'records':
2029 2029 for book, node in changes:
2030 2030 record = {'bookmark': book, 'node': node}
2031 2031 op.records.add('bookmarks', record)
2032 2032 else:
2033 2033 raise error.ProgrammingError('unkown bookmark mode: %s' % bookmarksmode)
2034 2034
2035 2035 @parthandler('phase-heads')
2036 2036 def handlephases(op, inpart):
2037 2037 """apply phases from bundle part to repo"""
2038 2038 headsbyphase = phases.binarydecode(inpart)
2039 2039 phases.updatephases(op.repo.unfiltered(), op.gettransaction, headsbyphase)
2040 2040
2041 2041 @parthandler('reply:pushkey', ('return', 'in-reply-to'))
2042 2042 def handlepushkeyreply(op, inpart):
2043 2043 """retrieve the result of a pushkey request"""
2044 2044 ret = int(inpart.params['return'])
2045 2045 partid = int(inpart.params['in-reply-to'])
2046 2046 op.records.add('pushkey', {'return': ret}, partid)
2047 2047
2048 2048 @parthandler('obsmarkers')
2049 2049 def handleobsmarker(op, inpart):
2050 2050 """add a stream of obsmarkers to the repo"""
2051 2051 tr = op.gettransaction()
2052 2052 markerdata = inpart.read()
2053 2053 if op.ui.config('experimental', 'obsmarkers-exchange-debug'):
2054 2054 op.ui.write(('obsmarker-exchange: %i bytes received\n')
2055 2055 % len(markerdata))
2056 2056 # The mergemarkers call will crash if marker creation is not enabled.
2057 2057 # we want to avoid this if the part is advisory.
2058 2058 if not inpart.mandatory and op.repo.obsstore.readonly:
2059 op.repo.ui.debug('ignoring obsolescence markers, feature not enabled')
2059 op.repo.ui.debug('ignoring obsolescence markers, feature not enabled\n')
2060 2060 return
2061 2061 new = op.repo.obsstore.mergemarkers(tr, markerdata)
2062 2062 op.repo.invalidatevolatilesets()
2063 2063 if new:
2064 2064 op.repo.ui.status(_('%i new obsolescence markers\n') % new)
2065 2065 op.records.add('obsmarkers', {'new': new})
2066 2066 if op.reply is not None:
2067 2067 rpart = op.reply.newpart('reply:obsmarkers')
2068 2068 rpart.addparam(
2069 2069 'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
2070 2070 rpart.addparam('new', '%i' % new, mandatory=False)
2071 2071
2072 2072
2073 2073 @parthandler('reply:obsmarkers', ('new', 'in-reply-to'))
2074 2074 def handleobsmarkerreply(op, inpart):
2075 2075 """retrieve the result of a pushkey request"""
2076 2076 ret = int(inpart.params['new'])
2077 2077 partid = int(inpart.params['in-reply-to'])
2078 2078 op.records.add('obsmarkers', {'new': ret}, partid)
2079 2079
2080 2080 @parthandler('hgtagsfnodes')
2081 2081 def handlehgtagsfnodes(op, inpart):
2082 2082 """Applies .hgtags fnodes cache entries to the local repo.
2083 2083
2084 2084 Payload is pairs of 20 byte changeset nodes and filenodes.
2085 2085 """
2086 2086 # Grab the transaction so we ensure that we have the lock at this point.
2087 2087 if op.ui.configbool('experimental', 'bundle2lazylocking'):
2088 2088 op.gettransaction()
2089 2089 cache = tags.hgtagsfnodescache(op.repo.unfiltered())
2090 2090
2091 2091 count = 0
2092 2092 while True:
2093 2093 node = inpart.read(20)
2094 2094 fnode = inpart.read(20)
2095 2095 if len(node) < 20 or len(fnode) < 20:
2096 2096 op.ui.debug('ignoring incomplete received .hgtags fnodes data\n')
2097 2097 break
2098 2098 cache.setfnode(node, fnode)
2099 2099 count += 1
2100 2100
2101 2101 cache.write()
2102 2102 op.ui.debug('applied %i hgtags fnodes cache entries\n' % count)
2103 2103
2104 2104 @parthandler('pushvars')
2105 2105 def bundle2getvars(op, part):
2106 2106 '''unbundle a bundle2 containing shellvars on the server'''
2107 2107 # An option to disable unbundling on server-side for security reasons
2108 2108 if op.ui.configbool('push', 'pushvars.server'):
2109 2109 hookargs = {}
2110 2110 for key, value in part.advisoryparams:
2111 2111 key = key.upper()
2112 2112 # We want pushed variables to have USERVAR_ prepended so we know
2113 2113 # they came from the --pushvar flag.
2114 2114 key = "USERVAR_" + key
2115 2115 hookargs[key] = value
2116 2116 op.addhookargs(hookargs)
@@ -1,996 +1,996 b''
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import, print_function
9 9
10 10 import difflib
11 11 import errno
12 12 import getopt
13 13 import os
14 14 import pdb
15 15 import re
16 16 import signal
17 17 import sys
18 18 import time
19 19 import traceback
20 20
21 21
22 22 from .i18n import _
23 23
24 24 from . import (
25 25 cmdutil,
26 26 color,
27 27 commands,
28 28 demandimport,
29 29 encoding,
30 30 error,
31 31 extensions,
32 32 fancyopts,
33 33 help,
34 34 hg,
35 35 hook,
36 36 profiling,
37 37 pycompat,
38 38 registrar,
39 39 scmutil,
40 40 ui as uimod,
41 41 util,
42 42 )
43 43
44 44 unrecoverablewrite = registrar.command.unrecoverablewrite
45 45
46 46 class request(object):
47 47 def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
48 48 ferr=None, prereposetups=None):
49 49 self.args = args
50 50 self.ui = ui
51 51 self.repo = repo
52 52
53 53 # input/output/error streams
54 54 self.fin = fin
55 55 self.fout = fout
56 56 self.ferr = ferr
57 57
58 58 # remember options pre-parsed by _earlyparseopts()
59 59 self.earlyoptions = {}
60 60
61 61 # reposetups which run before extensions, useful for chg to pre-fill
62 62 # low-level repo state (for example, changelog) before extensions.
63 63 self.prereposetups = prereposetups or []
64 64
65 65 def _runexithandlers(self):
66 66 exc = None
67 67 handlers = self.ui._exithandlers
68 68 try:
69 69 while handlers:
70 70 func, args, kwargs = handlers.pop()
71 71 try:
72 72 func(*args, **kwargs)
73 73 except: # re-raises below
74 74 if exc is None:
75 75 exc = sys.exc_info()[1]
76 76 self.ui.warn(('error in exit handlers:\n'))
77 77 self.ui.traceback(force=True)
78 78 finally:
79 79 if exc is not None:
80 80 raise exc
81 81
82 82 def run():
83 83 "run the command in sys.argv"
84 84 _initstdio()
85 85 req = request(pycompat.sysargv[1:])
86 86 err = None
87 87 try:
88 88 status = (dispatch(req) or 0) & 255
89 89 except error.StdioError as e:
90 90 err = e
91 91 status = -1
92 92 if util.safehasattr(req.ui, 'fout'):
93 93 try:
94 94 req.ui.fout.flush()
95 95 except IOError as e:
96 96 err = e
97 97 status = -1
98 98 if util.safehasattr(req.ui, 'ferr'):
99 99 if err is not None and err.errno != errno.EPIPE:
100 100 req.ui.ferr.write('abort: %s\n' %
101 101 encoding.strtolocal(err.strerror))
102 102 req.ui.ferr.flush()
103 103 sys.exit(status & 255)
104 104
105 105 def _initstdio():
106 106 for fp in (sys.stdin, sys.stdout, sys.stderr):
107 107 util.setbinary(fp)
108 108
109 109 def _getsimilar(symbols, value):
110 110 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
111 111 # The cutoff for similarity here is pretty arbitrary. It should
112 112 # probably be investigated and tweaked.
113 113 return [s for s in symbols if sim(s) > 0.6]
114 114
115 115 def _reportsimilar(write, similar):
116 116 if len(similar) == 1:
117 117 write(_("(did you mean %s?)\n") % similar[0])
118 118 elif similar:
119 119 ss = ", ".join(sorted(similar))
120 120 write(_("(did you mean one of %s?)\n") % ss)
121 121
122 122 def _formatparse(write, inst):
123 123 similar = []
124 124 if isinstance(inst, error.UnknownIdentifier):
125 125 # make sure to check fileset first, as revset can invoke fileset
126 126 similar = _getsimilar(inst.symbols, inst.function)
127 127 if len(inst.args) > 1:
128 128 write(_("hg: parse error at %s: %s\n") %
129 129 (inst.args[1], inst.args[0]))
130 130 if (inst.args[0][0] == ' '):
131 131 write(_("unexpected leading whitespace\n"))
132 132 else:
133 133 write(_("hg: parse error: %s\n") % inst.args[0])
134 134 _reportsimilar(write, similar)
135 135 if inst.hint:
136 136 write(_("(%s)\n") % inst.hint)
137 137
138 138 def _formatargs(args):
139 139 return ' '.join(util.shellquote(a) for a in args)
140 140
141 141 def dispatch(req):
142 142 "run the command specified in req.args"
143 143 if req.ferr:
144 144 ferr = req.ferr
145 145 elif req.ui:
146 146 ferr = req.ui.ferr
147 147 else:
148 148 ferr = util.stderr
149 149
150 150 try:
151 151 if not req.ui:
152 152 req.ui = uimod.ui.load()
153 153 req.earlyoptions.update(_earlyparseopts(req.ui, req.args))
154 154 if req.earlyoptions['traceback']:
155 155 req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
156 156
157 157 # set ui streams from the request
158 158 if req.fin:
159 159 req.ui.fin = req.fin
160 160 if req.fout:
161 161 req.ui.fout = req.fout
162 162 if req.ferr:
163 163 req.ui.ferr = req.ferr
164 164 except error.Abort as inst:
165 165 ferr.write(_("abort: %s\n") % inst)
166 166 if inst.hint:
167 167 ferr.write(_("(%s)\n") % inst.hint)
168 168 return -1
169 169 except error.ParseError as inst:
170 170 _formatparse(ferr.write, inst)
171 171 return -1
172 172
173 173 msg = _formatargs(req.args)
174 174 starttime = util.timer()
175 175 ret = None
176 176 try:
177 177 ret = _runcatch(req)
178 178 except error.ProgrammingError as inst:
179 179 req.ui.warn(_('** ProgrammingError: %s\n') % inst)
180 180 if inst.hint:
181 181 req.ui.warn(_('** (%s)\n') % inst.hint)
182 182 raise
183 183 except KeyboardInterrupt as inst:
184 184 try:
185 185 if isinstance(inst, error.SignalInterrupt):
186 186 msg = _("killed!\n")
187 187 else:
188 188 msg = _("interrupted!\n")
189 189 req.ui.warn(msg)
190 190 except error.SignalInterrupt:
191 191 # maybe pager would quit without consuming all the output, and
192 192 # SIGPIPE was raised. we cannot print anything in this case.
193 193 pass
194 194 except IOError as inst:
195 195 if inst.errno != errno.EPIPE:
196 196 raise
197 197 ret = -1
198 198 finally:
199 199 duration = util.timer() - starttime
200 200 req.ui.flush()
201 201 if req.ui.logblockedtimes:
202 202 req.ui._blockedtimes['command_duration'] = duration * 1000
203 203 req.ui.log('uiblocked', 'ui blocked ms',
204 204 **pycompat.strkwargs(req.ui._blockedtimes))
205 205 req.ui.log("commandfinish", "%s exited %d after %0.2f seconds\n",
206 206 msg, ret or 0, duration)
207 207 try:
208 208 req._runexithandlers()
209 209 except: # exiting, so no re-raises
210 210 ret = ret or -1
211 211 return ret
212 212
213 213 def _runcatch(req):
214 214 def catchterm(*args):
215 215 raise error.SignalInterrupt
216 216
217 217 ui = req.ui
218 218 try:
219 219 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
220 220 num = getattr(signal, name, None)
221 221 if num:
222 222 signal.signal(num, catchterm)
223 223 except ValueError:
224 224 pass # happens if called in a thread
225 225
226 226 def _runcatchfunc():
227 227 realcmd = None
228 228 try:
229 229 cmdargs = fancyopts.fancyopts(req.args[:], commands.globalopts, {})
230 230 cmd = cmdargs[0]
231 231 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
232 232 realcmd = aliases[0]
233 233 except (error.UnknownCommand, error.AmbiguousCommand,
234 234 IndexError, getopt.GetoptError):
235 235 # Don't handle this here. We know the command is
236 236 # invalid, but all we're worried about for now is that
237 237 # it's not a command that server operators expect to
238 238 # be safe to offer to users in a sandbox.
239 239 pass
240 240 if realcmd == 'serve' and '--stdio' in cmdargs:
241 241 # We want to constrain 'hg serve --stdio' instances pretty
242 242 # closely, as many shared-ssh access tools want to grant
243 243 # access to run *only* 'hg -R $repo serve --stdio'. We
244 244 # restrict to exactly that set of arguments, and prohibit
245 245 # any repo name that starts with '--' to prevent
246 246 # shenanigans wherein a user does something like pass
247 247 # --debugger or --config=ui.debugger=1 as a repo
248 248 # name. This used to actually run the debugger.
249 249 if (len(req.args) != 4 or
250 250 req.args[0] != '-R' or
251 251 req.args[1].startswith('--') or
252 252 req.args[2] != 'serve' or
253 253 req.args[3] != '--stdio'):
254 254 raise error.Abort(
255 255 _('potentially unsafe serve --stdio invocation: %r') %
256 256 (req.args,))
257 257
258 258 try:
259 259 debugger = 'pdb'
260 260 debugtrace = {
261 261 'pdb': pdb.set_trace
262 262 }
263 263 debugmortem = {
264 264 'pdb': pdb.post_mortem
265 265 }
266 266
267 267 # read --config before doing anything else
268 268 # (e.g. to change trust settings for reading .hg/hgrc)
269 269 cfgs = _parseconfig(req.ui, req.earlyoptions['config'])
270 270
271 271 if req.repo:
272 272 # copy configs that were passed on the cmdline (--config) to
273 273 # the repo ui
274 274 for sec, name, val in cfgs:
275 275 req.repo.ui.setconfig(sec, name, val, source='--config')
276 276
277 277 # developer config: ui.debugger
278 278 debugger = ui.config("ui", "debugger")
279 279 debugmod = pdb
280 280 if not debugger or ui.plain():
281 281 # if we are in HGPLAIN mode, then disable custom debugging
282 282 debugger = 'pdb'
283 283 elif req.earlyoptions['debugger']:
284 284 # This import can be slow for fancy debuggers, so only
285 285 # do it when absolutely necessary, i.e. when actual
286 286 # debugging has been requested
287 287 with demandimport.deactivated():
288 288 try:
289 289 debugmod = __import__(debugger)
290 290 except ImportError:
291 291 pass # Leave debugmod = pdb
292 292
293 293 debugtrace[debugger] = debugmod.set_trace
294 294 debugmortem[debugger] = debugmod.post_mortem
295 295
296 296 # enter the debugger before command execution
297 297 if req.earlyoptions['debugger']:
298 298 ui.warn(_("entering debugger - "
299 299 "type c to continue starting hg or h for help\n"))
300 300
301 301 if (debugger != 'pdb' and
302 302 debugtrace[debugger] == debugtrace['pdb']):
303 303 ui.warn(_("%s debugger specified "
304 304 "but its module was not found\n") % debugger)
305 305 with demandimport.deactivated():
306 306 debugtrace[debugger]()
307 307 try:
308 308 return _dispatch(req)
309 309 finally:
310 310 ui.flush()
311 311 except: # re-raises
312 312 # enter the debugger when we hit an exception
313 313 if req.earlyoptions['debugger']:
314 314 traceback.print_exc()
315 315 debugmortem[debugger](sys.exc_info()[2])
316 316 raise
317 317
318 318 return _callcatch(ui, _runcatchfunc)
319 319
320 320 def _callcatch(ui, func):
321 321 """like scmutil.callcatch but handles more high-level exceptions about
322 322 config parsing and commands. besides, use handlecommandexception to handle
323 323 uncaught exceptions.
324 324 """
325 325 try:
326 326 return scmutil.callcatch(ui, func)
327 327 except error.AmbiguousCommand as inst:
328 328 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
329 329 (inst.args[0], " ".join(inst.args[1])))
330 330 except error.CommandError as inst:
331 331 if inst.args[0]:
332 332 ui.pager('help')
333 333 msgbytes = pycompat.bytestr(inst.args[1])
334 334 ui.warn(_("hg %s: %s\n") % (inst.args[0], msgbytes))
335 335 commands.help_(ui, inst.args[0], full=False, command=True)
336 336 else:
337 337 ui.pager('help')
338 338 ui.warn(_("hg: %s\n") % inst.args[1])
339 339 commands.help_(ui, 'shortlist')
340 340 except error.ParseError as inst:
341 341 _formatparse(ui.warn, inst)
342 342 return -1
343 343 except error.UnknownCommand as inst:
344 344 nocmdmsg = _("hg: unknown command '%s'\n") % inst.args[0]
345 345 try:
346 346 # check if the command is in a disabled extension
347 347 # (but don't check for extensions themselves)
348 348 formatted = help.formattedhelp(ui, commands, inst.args[0],
349 349 unknowncmd=True)
350 350 ui.warn(nocmdmsg)
351 351 ui.write(formatted)
352 352 except (error.UnknownCommand, error.Abort):
353 353 suggested = False
354 354 if len(inst.args) == 2:
355 355 sim = _getsimilar(inst.args[1], inst.args[0])
356 356 if sim:
357 357 ui.warn(nocmdmsg)
358 358 _reportsimilar(ui.warn, sim)
359 359 suggested = True
360 360 if not suggested:
361 361 ui.pager('help')
362 362 ui.warn(nocmdmsg)
363 363 commands.help_(ui, 'shortlist')
364 364 except IOError:
365 365 raise
366 366 except KeyboardInterrupt:
367 367 raise
368 368 except: # probably re-raises
369 369 if not handlecommandexception(ui):
370 370 raise
371 371
372 372 return -1
373 373
def aliasargs(fn, givenargs):
    """Merge an alias function's stored 'args' with the user-given arguments.

    ``$1``-style placeholders in the stored args consume positional
    arguments from *givenargs*; unconsumed given arguments are appended.
    """
    args = []
    # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
    if not util.safehasattr(fn, '_origfunc'):
        args = getattr(fn, 'args', args)
    if not args:
        return args + givenargs
    quoted = ' '.join(map(util.shellquote, args))
    consumed = []
    def _substitute(m):
        idx = int(m.group(1)) - 1
        consumed.append(idx)
        if idx >= len(givenargs):
            raise error.Abort(_('too few arguments for command alias'))
        return givenargs[idx]
    quoted = re.sub(br'\$(\d+|\$)', _substitute, quoted)
    # arguments already consumed by a $N placeholder are not appended again
    leftover = [x for i, x in enumerate(givenargs) if i not in consumed]
    return pycompat.shlexsplit(quoted) + leftover
394 394
def aliasinterpolate(name, args, cmd):
    '''interpolate args into cmd for shell aliases

    This also handles $0, $@ and "$@".
    '''
    # util.interpolate can't deal with "$@" (with quotes) because it's only
    # built to match prefix + patterns.
    substitutions = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
    substitutions['$0'] = name
    substitutions['$$'] = '$'
    substitutions['$@'] = ' '.join(args)
    # Typical Unix shells interpolate "$@" (with quotes) as all the positional
    # parameters, separated out into words. Emulate the same behavior here by
    # quoting the arguments individually. POSIX shells will then typically
    # tokenize each argument into exactly one word.
    substitutions['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
    # escape '\$' for regex
    pattern = re.compile(
        '|'.join(substitutions.keys()).replace('$', br'\$'))
    return pattern.sub(lambda m: substitutions[m.group()], cmd)
415 415
class cmdalias(object):
    """A callable standing in for a command defined via [alias] config.

    Parses *definition* at construction time. Three outcomes are possible:

    - shell alias (definition starts with '!'): ``self.fn`` runs the shell
      command through ``ui.system`` and ``self.shell`` is set;
    - regular alias: ``self.fn``/``self.opts``/``self.help`` are taken from
      the resolved command's table entry;
    - broken alias: ``self.badalias`` holds the error message and calling
      the alias aborts with it.
    """

    def __init__(self, name, definition, cmdtable, source):
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.fn = None
        self.givenargs = []
        self.opts = []
        self.help = ''
        self.badalias = None
        self.unknowncmd = False
        self.source = source

        # detect whether this alias shadows an existing command; if so,
        # remember the canonical name of the shadowed command in self.cmd
        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            self.badalias = _("no definition for alias '%s'") % self.name
            return

        if self.definition.startswith('!'):
            # shell alias: run the rest of the definition via ui.system()
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    # keep $N/$$ placeholders that aliasinterpolate will
                    # handle; drop (with a debug note) $N beyond the number
                    # of supplied arguments
                    if m.groups()[0] == '$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        # note: message is newline-terminated (resolved from
                        # leftover diff in favor of the '\n' variant)
                        ui.debug("No argument found for substitution "
                                 "of %i variable in alias '%s' definition.\n"
                                 % (int(m.groups()[0]), self.name))
                        return ''
                cmd = re.sub(br'\$(\d+|\$)', _checkvar, self.definition[1:])
                cmd = aliasinterpolate(self.name, args, cmd)
                return ui.system(cmd, environ=env,
                                 blockedtag='alias_%s' % self.name)
            self.fn = fn
            return

        try:
            args = pycompat.shlexsplit(self.definition)
        except ValueError as inst:
            self.badalias = (_("error in definition for alias '%s': %s")
                             % (self.name, inst))
            return
        # early options (-R, --cwd, --config, ...) must come from the real
        # command line, not from an alias definition
        earlyopts, args = _earlysplitopts(args)
        if earlyopts:
            self.badalias = (_("error in definition for alias '%s': %s may "
                               "only be given on the command line")
                             % (self.name, '/'.join(zip(*earlyopts)[0])))
            return
        self.cmdname = cmd = args.pop(0)
        self.givenargs = args

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            self.badalias = (_("alias '%s' resolves to unknown command '%s'")
                             % (self.name, cmd))
            self.unknowncmd = True
        except error.AmbiguousCommand:
            self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
                             % (self.name, cmd))

    @property
    def args(self):
        # expand user/environment variables in the stored arguments, then
        # merge with any 'args' carried by the target function
        args = pycompat.maplist(util.expandpath, self.givenargs)
        return aliasargs(self.fn, args)

    def __getattr__(self, name):
        # expose the target command's attributes (norepo, inferrepo, ...),
        # falling back to safe defaults for shell or broken aliases
        adefaults = {r'norepo': True, r'cmdtype': unrecoverablewrite,
                     r'optionalrepo': False, r'inferrepo': False}
        if name not in adefaults:
            raise AttributeError(name)
        if self.badalias or util.safehasattr(self, 'shell'):
            return adefaults[name]
        return getattr(self.fn, name)

    def __call__(self, ui, *args, **opts):
        if self.badalias:
            hint = None
            if self.unknowncmd:
                try:
                    # check if the command is in a disabled extension
                    cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
                    hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
                except error.UnknownCommand:
                    pass
            raise error.Abort(self.badalias, hint=hint)
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        ui.log('commandalias', "alias '%s' expands to '%s'\n",
               self.name, self.definition)
        if util.safehasattr(self, 'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                return util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise
539 539
class lazyaliasentry(object):
    """like a typical command entry (func, opts, help), but is lazy"""

    def __init__(self, name, definition, cmdtable, source):
        self.name = name
        self.definition = definition
        # snapshot of the command table at definition time (presumably so the
        # alias resolves against commands known when it was declared)
        self.cmdtable = cmdtable.copy()
        self.source = source

    @util.propertycache
    def _aliasdef(self):
        # the cmdalias (which parses the definition and resolves the target
        # command) is only built on first access, then cached
        return cmdalias(self.name, self.definition, self.cmdtable, self.source)

    def __getitem__(self, n):
        # emulate the (func, opts, help) 3-tuple shape of a command entry
        aliasdef = self._aliasdef
        if n == 0:
            return aliasdef
        elif n == 1:
            return aliasdef.opts
        elif n == 2:
            return aliasdef.help
        else:
            raise IndexError

    def __iter__(self):
        for i in range(3):
            yield self[i]

    def __len__(self):
        return 3
570 570
def addaliases(ui, cmdtable):
    """Install every [alias] config entry into cmdtable as a lazy entry."""
    # aliases are processed after extensions have been loaded, so they
    # may use extension commands. Aliases can also use other alias definitions,
    # but only if they have been defined prior to the current definition.
    for name, definition in ui.configitems('alias'):
        try:
            unchanged = cmdtable[name].definition == definition
        except (KeyError, AttributeError):
            # definition might not exist or it might not be a cmdalias
            unchanged = False
        if unchanged:
            continue
        cmdtable[name] = lazyaliasentry(name, definition, cmdtable,
                                        ui.configsource('alias', name))
586 586
def _parse(ui, args):
    """Parse a full command line into its components.

    Returns a 5-tuple ``(cmd, func, args, options, cmdoptions)``: the
    canonical command name (or None when no command was given), the command
    function (or None), the remaining positional arguments, the global
    option dict, and the command-specific option dict.

    Raises error.CommandError on bad options.
    """
    options = {}
    cmdoptions = {}

    try:
        # first pass: global options only, up to the command name
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except getopt.GetoptError as inst:
        raise error.CommandError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, entry = cmdutil.findcmd(cmd, commands.table,
                                         ui.configbool("ui", "strict"))
        cmd = aliases[0]
        args = aliasargs(entry[0], args)
        # prepend any [defaults] configuration for this command
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = pycompat.maplist(
                util.expandpath, pycompat.shlexsplit(defaults)) + args
        c = list(entry[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        # second pass: command options plus (repeated) global options
        args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True)
    except getopt.GetoptError as inst:
        raise error.CommandError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
627 627
def _parseconfig(ui, config):
    """parse the --config options from the command line"""
    parsed = []
    for spec in config:
        try:
            name, value = [part.strip() for part in spec.split('=', 1)]
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            ui.setconfig(section, name, value, '--config')
            parsed.append((section, name, value))
        except (IndexError, ValueError):
            # covers missing '=', missing '.', and empty section/name
            raise error.Abort(_('malformed --config option: %r '
                                '(use --config section.name=value)') % spec)
    return parsed
646 646
def _earlyparseopts(ui, args):
    """Parse global options that may appear before the command name."""
    parsed = {}
    fancyopts.fancyopts(args, commands.globalopts, parsed,
                        gnu=not ui.plain('strictflags'), early=True,
                        optaliases={'repository': ['repo']})
    return parsed
653 653
def _earlysplitopts(args):
    """Split args into a list of possible early options and remainder args"""
    shortopts = 'R:'
    # TODO: perhaps 'debugger' should be included
    longopts = ['cwd=', 'repository=', 'repo=', 'config=']
    return fancyopts.earlygetopt(args, shortopts, longopts, gnu=True,
                                 keepsep=True)
661 661
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run the command thunk ``d`` wrapped in pre-/post-/fail- hooks.

    Returns d()'s result. The post-<cmd> hook receives that result; the
    fail-<cmd> hook runs (and the exception is re-raised) when d() or the
    post hook raises.
    """
    # run pre-hook, and abort if it fails
    hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
              pats=cmdpats, opts=cmdoptions)
    try:
        ret = _runcommand(ui, options, cmd, d)
        # run post-hook, passing command result
        hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
                  result=ret, pats=cmdpats, opts=cmdoptions)
    except Exception:
        # run failure hook and re-raise
        hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
                  pats=cmdpats, opts=cmdoptions)
        raise
    return ret
677 677
def _getlocal(ui, rpath, wd=None):
    """Return (path, local ui object) for the given target path.

    Takes paths in [cwd]/.hg/hgrc into account.
    """
    if wd is None:
        try:
            wd = pycompat.getcwd()
        except OSError as e:
            raise error.Abort(_("error getting current working directory: %s") %
                              encoding.strtolocal(e.strerror))
    path = cmdutil.findrepo(wd) or ""
    if path:
        # found a repository: layer its hgrc on top of the global ui
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
    else:
        lui = ui

    if rpath:
        # an explicit -R/--repository path overrides the cwd-derived one
        path = lui.expandpath(rpath)
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    return path, lui
702 702
def _checkshellalias(lui, ui, args):
    """Return the function to run the shell alias, if it is required

    Returns None when the command line cannot be parsed, names no command,
    or the resolved command is not a shell alias.
    """
    options = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except getopt.GetoptError:
        return

    if not args:
        return

    cmdtable = commands.table

    cmd = args[0]
    try:
        strict = ui.configbool("ui", "strict")
        aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
    except (error.AmbiguousCommand, error.UnknownCommand):
        return

    # canonical name and function of the resolved (possibly abbreviated) cmd
    cmd = aliases[0]
    fn = entry[0]

    if cmd and util.safehasattr(fn, 'shell'):
        # shell alias shouldn't receive early options which are consumed by hg
        _earlyopts, args = _earlysplitopts(args)
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
                                  [], {})
733 733
def _dispatch(req):
    """Locate the repository and command for *req* and run it.

    This is the core dispatch path: it applies early options (--cwd, -R),
    loads extensions and aliases, parses the full command line, configures
    the ui objects, opens the repository when the command needs one, and
    finally runs the command through runcommand().
    """
    args = req.args
    ui = req.ui

    # check for cwd
    cwd = req.earlyoptions['cwd']
    if cwd:
        os.chdir(cwd)

    rpath = req.earlyoptions['repository']
    path, lui = _getlocal(ui, rpath)

    # every ui object whose configuration must be kept in sync below
    uis = {ui, lui}

    if req.repo:
        uis.add(req.repo.ui)

    if req.earlyoptions['profile']:
        for ui_ in uis:
            ui_.setconfig('profiling', 'enabled', 'true', '--profile')

    profile = lui.configbool('profiling', 'enabled')
    with profiling.profile(lui, enabled=profile) as profiler:
        # Configure extensions in phases: uisetup, extsetup, cmdtable, and
        # reposetup
        extensions.loadall(lui)
        # Propagate any changes to lui.__class__ by extensions
        ui.__class__ = lui.__class__

        # (uisetup and extsetup are handled in extensions.loadall)

        # (reposetup is handled in hg.repository)

        addaliases(lui, commands.table)

        # All aliases and commands are completely defined, now.
        # Check abbreviation/ambiguity of shell alias.
        shellaliasfn = _checkshellalias(lui, ui, args)
        if shellaliasfn:
            return shellaliasfn()

        # check for fallback encoding
        fallback = lui.config('ui', 'fallbackencoding')
        if fallback:
            encoding.fallbackencoding = fallback

        fullargs = args
        cmd, func, args, options, cmdoptions = _parse(lui, args)

        # early options were already consumed above; reject command lines
        # where the full parse disagrees with the early parse
        if options["config"] != req.earlyoptions["config"]:
            raise error.Abort(_("option --config may not be abbreviated!"))
        if options["cwd"] != req.earlyoptions["cwd"]:
            raise error.Abort(_("option --cwd may not be abbreviated!"))
        if options["repository"] != req.earlyoptions["repository"]:
            raise error.Abort(_(
                "option -R has to be separated from other options (e.g. not "
                "-qR) and --repository may only be abbreviated as --repo!"))
        if options["debugger"] != req.earlyoptions["debugger"]:
            raise error.Abort(_("option --debugger may not be abbreviated!"))
        # don't validate --profile/--traceback, which can be enabled from now

        if options["encoding"]:
            encoding.encoding = options["encoding"]
        if options["encodingmode"]:
            encoding.encodingmode = options["encodingmode"]
        if options["time"]:
            def get_times():
                t = os.times()
                if t[4] == 0.0:
                    # Windows leaves this as zero, so use time.clock()
                    t = (t[0], t[1], t[2], t[3], time.clock())
                return t
            s = get_times()
            def print_time():
                t = get_times()
                ui.warn(
                    _("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
            ui.atexit(print_time)
        if options["profile"]:
            profiler.start()

        if options['verbose'] or options['debug'] or options['quiet']:
            for opt in ('verbose', 'debug', 'quiet'):
                val = str(bool(options[opt]))
                if pycompat.ispy3:
                    val = val.encode('ascii')
                for ui_ in uis:
                    ui_.setconfig('ui', opt, val, '--' + opt)

        if options['traceback']:
            for ui_ in uis:
                ui_.setconfig('ui', 'traceback', 'on', '--traceback')

        if options['noninteractive']:
            for ui_ in uis:
                ui_.setconfig('ui', 'interactive', 'off', '-y')

        if cmdoptions.get('insecure', False):
            for ui_ in uis:
                ui_.insecureconnections = True

        # setup color handling before pager, because setting up pager
        # might cause incorrect console information
        coloropt = options['color']
        for ui_ in uis:
            if coloropt:
                ui_.setconfig('ui', 'color', coloropt, '--color')
            color.setup(ui_)

        if util.parsebool(options['pager']):
            # ui.pager() expects 'internal-always-' prefix in this case
            ui.pager('internal-always-' + cmd)
        elif options['pager'] != 'auto':
            for ui_ in uis:
                ui_.disablepager()

        if options['version']:
            return commands.version_(ui)
        if options['help']:
            return commands.help_(ui, cmd, command=cmd is not None)
        elif not cmd:
            return commands.help_(ui, 'shortlist')

        repo = None
        cmdpats = args[:]
        if not func.norepo:
            # use the repo from the request only if we don't have -R
            if not rpath and not cwd:
                repo = req.repo

            if repo:
                # set the descriptors of the repo ui to those of ui
                repo.ui.fin = ui.fin
                repo.ui.fout = ui.fout
                repo.ui.ferr = ui.ferr
            else:
                try:
                    repo = hg.repository(ui, path=path,
                                         presetupfuncs=req.prereposetups)
                    if not repo.local():
                        raise error.Abort(_("repository '%s' is not local")
                                          % path)
                    repo.ui.setconfig("bundle", "mainreporoot", repo.root,
                                      'repo')
                except error.RequirementError:
                    raise
                except error.RepoError:
                    if rpath: # invalid -R path
                        raise
                    if not func.optionalrepo:
                        if func.inferrepo and args and not path:
                            # try to infer -R from command args
                            repos = pycompat.maplist(cmdutil.findrepo, args)
                            guess = repos[0]
                            if guess and repos.count(guess) == len(repos):
                                req.args = ['--repository', guess] + fullargs
                                req.earlyoptions['repository'] = guess
                                # re-dispatch with the inferred repository
                                return _dispatch(req)
                        if not path:
                            raise error.RepoError(_("no repository found in"
                                                    " '%s' (.hg not found)")
                                                  % pycompat.getcwd())
                        raise
            if repo:
                ui = repo.ui
                if options['hidden']:
                    repo = repo.unfiltered()
            args.insert(0, repo)
        elif rpath:
            ui.warn(_("warning: --repository ignored\n"))

        msg = _formatargs(fullargs)
        ui.log("command", '%s\n', msg)
        strcmdopt = pycompat.strkwargs(cmdoptions)
        d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
        try:
            return runcommand(lui, repo, cmd, fullargs, ui, options, d,
                              cmdpats, cmdoptions)
        finally:
            # close repositories we opened ourselves, not the request's
            if repo and repo != req.repo:
                repo.close()
916 916
917 917 def _runcommand(ui, options, cmd, cmdfunc):
918 918 """Run a command function, possibly with profiling enabled."""
919 919 try:
920 920 return cmdfunc()
921 921 except error.SignatureError:
922 922 raise error.CommandError(cmd, _('invalid arguments'))
923 923
def _exceptionwarning(ui):
    """Produce a warning message for the current active exception"""

    # For compatibility checking, we discard the portion of the hg
    # version after the + on the assumption that if a "normal
    # user" is running a build with a + in it the packager
    # probably built from fairly close to a tag and anyone with a
    # 'make local' copy of hg (where the version number can be out
    # of date) will be clueful enough to notice the implausible
    # version number and try updating.
    ct = util.versiontuple(n=2)
    # worst: (extension name, nearest tested version, bug report contact)
    worst = None, ct, ''
    if ui.config('ui', 'supportcontact') is None:
        for name, mod in extensions.extensions():
            testedwith = getattr(mod, 'testedwith', '')
            if pycompat.ispy3 and isinstance(testedwith, str):
                testedwith = testedwith.encode(u'utf-8')
            report = getattr(mod, 'buglink', _('the extension author.'))
            if not testedwith.strip():
                # We found an untested extension. It's likely the culprit.
                worst = name, 'unknown', report
                break

            # Never blame on extensions bundled with Mercurial.
            if extensions.ismoduleinternal(mod):
                continue

            tested = [util.versiontuple(t, 2) for t in testedwith.split()]
            if ct in tested:
                continue

            # extension not tested with this hg version; blame the one
            # whose nearest tested version is furthest from the current
            lower = [t for t in tested if t < ct]
            nearest = max(lower or tested)
            if worst[0] is None or nearest < worst[1]:
                worst = name, nearest, report
    if worst[0] is not None:
        name, testedwith, report = worst
        if not isinstance(testedwith, (bytes, str)):
            testedwith = '.'.join([str(c) for c in testedwith])
        warning = (_('** Unknown exception encountered with '
                     'possibly-broken third-party extension %s\n'
                     '** which supports versions %s of Mercurial.\n'
                     '** Please disable %s and try your action again.\n'
                     '** If that fixes the bug please report it to %s\n')
                   % (name, testedwith, name, report))
    else:
        # no suspicious extension: point the user at the bug tracker
        bugtracker = ui.config('ui', 'supportcontact')
        if bugtracker is None:
            bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
        warning = (_("** unknown exception encountered, "
                     "please report by visiting\n** ") + bugtracker + '\n')
    if pycompat.ispy3:
        sysversion = sys.version.encode(u'utf-8')
    else:
        sysversion = sys.version
    sysversion = sysversion.replace('\n', '')
    warning += ((_("** Python %s\n") % sysversion) +
                (_("** Mercurial Distributed SCM (version %s)\n") %
                 util.version()) +
                (_("** Extensions loaded: %s\n") %
                 ", ".join([x[0] for x in extensions.extensions()])))
    return warning
986 986
def handlecommandexception(ui):
    """Produce a warning message for broken commands

    Called when handling an exception; the exception is reraised if
    this function returns False, ignored otherwise.
    """
    warning = _exceptionwarning(ui)
    # keep the full traceback in the log even though only the warning is
    # shown to the user
    ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
    ui.warn(warning)
    return False # re-raise the exception
General Comments 0
You need to be logged in to leave comments. Login now