py3: fix infinitepush extension tests...
Mark Thomas
r40288:090e5f39 default
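The Python 3 fixes to the infinitepush extension below follow the usual Mercurial porting pattern: keyword arguments cross the **kwargs boundary with str keys while the extension works with bytes internally, and reply-part parameters are built with bytes formatting (b'%d' % part.id) instead of str(). A minimal sketch of the byteskwargs/strkwargs round-trip, assuming only mercurial.pycompat (the wrapped command and its arguments are hypothetical, not taken verbatim from this change):

    from mercurial import pycompat

    def _wrappedpush(orig, ui, repo, dest=None, *args, **opts):
        opts = pycompat.byteskwargs(opts)        # str keys -> bytes keys
        bookmark = opts.get(b'bookmark') or []   # look values up with bytes keys
        # ... all internal work uses bytes ...
        return orig(ui, repo, dest, *args, **pycompat.strkwargs(opts))  # back to str keys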
@@ -1,620 +1,623 b''
1 1 test-abort-checkin.t
2 2 test-absorb-filefixupstate.py
3 3 test-absorb-phase.t
4 4 test-absorb-rename.t
5 5 test-absorb-strip.t
6 6 test-absorb.t
7 7 test-add.t
8 8 test-addremove-similar.t
9 9 test-addremove.t
10 10 test-alias.t
11 11 test-amend-subrepo.t
12 12 test-amend.t
13 13 test-ancestor.py
14 14 test-annotate.py
15 15 test-annotate.t
16 16 test-archive-symlinks.t
17 17 test-archive.t
18 18 test-atomictempfile.py
19 19 test-audit-path.t
20 20 test-audit-subrepo.t
21 21 test-automv.t
22 22 test-backout.t
23 23 test-backwards-remove.t
24 24 test-bad-extension.t
25 25 test-bad-pull.t
26 26 test-basic.t
27 27 test-bdiff.py
28 28 test-bheads.t
29 29 test-bisect.t
30 30 test-bisect2.t
31 31 test-bisect3.t
32 32 test-blackbox.t
33 33 test-bookmarks-current.t
34 34 test-bookmarks-merge.t
35 35 test-bookmarks-pushpull.t
36 36 test-bookmarks-rebase.t
37 37 test-bookmarks-strip.t
38 38 test-bookmarks.t
39 39 test-branch-change.t
40 40 test-branch-option.t
41 41 test-branch-tag-confict.t
42 42 test-branches.t
43 43 test-bundle-phases.t
44 44 test-bundle-r.t
45 45 test-bundle-type.t
46 46 test-bundle-vs-outgoing.t
47 47 test-bundle.t
48 48 test-bundle2-exchange.t
49 49 test-bundle2-format.t
50 50 test-bundle2-multiple-changegroups.t
51 51 test-bundle2-pushback.t
52 52 test-bundle2-remote-changegroup.t
53 53 test-cache-abuse.t
54 54 test-cappedreader.py
55 55 test-casecollision.t
56 56 test-cat.t
57 57 test-cbor.py
58 58 test-censor.t
59 59 test-changelog-exec.t
60 60 test-check-code.t
61 61 test-check-commit.t
62 62 test-check-execute.t
63 63 test-check-interfaces.py
64 64 test-check-module-imports.t
65 65 test-check-py3-compat.t
66 66 test-check-pyflakes.t
67 67 test-check-pylint.t
68 68 test-check-shbang.t
69 69 test-children.t
70 70 test-churn.t
71 71 test-clone-cgi.t
72 72 test-clone-pull-corruption.t
73 73 test-clone-r.t
74 74 test-clone-uncompressed.t
75 75 test-clone-update-order.t
76 76 test-clone.t
77 77 test-clonebundles.t
78 78 test-close-head.t
79 79 test-commit-amend.t
80 80 test-commit-interactive.t
81 81 test-commit-multiple.t
82 82 test-commit-unresolved.t
83 83 test-commit.t
84 84 test-committer.t
85 85 test-completion.t
86 86 test-config-env.py
87 87 test-config.t
88 88 test-conflict.t
89 89 test-confused-revert.t
90 90 test-context.py
91 91 test-contrib-check-code.t
92 92 test-contrib-check-commit.t
93 93 test-contrib-dumprevlog.t
94 94 test-contrib-perf.t
95 95 test-contrib-relnotes.t
96 96 test-contrib-testparseutil.t
97 97 test-convert-authormap.t
98 98 test-convert-clonebranches.t
99 99 test-convert-cvs-branch.t
100 100 test-convert-cvs-detectmerge.t
101 101 test-convert-cvs-synthetic.t
102 102 test-convert-cvs.t
103 103 test-convert-cvsnt-mergepoints.t
104 104 test-convert-datesort.t
105 105 test-convert-filemap.t
106 106 test-convert-hg-sink.t
107 107 test-convert-hg-source.t
108 108 test-convert-hg-startrev.t
109 109 test-convert-splicemap.t
110 110 test-convert-tagsbranch-topology.t
111 111 test-copy-move-merge.t
112 112 test-copy.t
113 113 test-copytrace-heuristics.t
114 114 test-debugbuilddag.t
115 115 test-debugbundle.t
116 116 test-debugcommands.t
117 117 test-debugextensions.t
118 118 test-debugindexdot.t
119 119 test-debugrename.t
120 120 test-default-push.t
121 121 test-diff-antipatience.t
122 122 test-diff-binary-file.t
123 123 test-diff-change.t
124 124 test-diff-copy-depth.t
125 125 test-diff-hashes.t
126 126 test-diff-ignore-whitespace.t
127 127 test-diff-indent-heuristic.t
128 128 test-diff-issue2761.t
129 129 test-diff-newlines.t
130 130 test-diff-reverse.t
131 131 test-diff-subdir.t
132 132 test-diff-unified.t
133 133 test-diff-upgrade.t
134 134 test-diffdir.t
135 135 test-diffstat.t
136 136 test-directaccess.t
137 137 test-dirstate-backup.t
138 138 test-dirstate-nonnormalset.t
139 139 test-dirstate.t
140 140 test-dispatch.py
141 141 test-doctest.py
142 142 test-double-merge.t
143 143 test-drawdag.t
144 144 test-duplicateoptions.py
145 145 test-editor-filename.t
146 146 test-empty-dir.t
147 147 test-empty-file.t
148 148 test-empty-group.t
149 149 test-empty.t
150 150 test-encode.t
151 151 test-encoding-func.py
152 152 test-encoding.t
153 153 test-eol-add.t
154 154 test-eol-clone.t
155 155 test-eol-hook.t
156 156 test-eol-patch.t
157 157 test-eol-tag.t
158 158 test-eol-update.t
159 159 test-eol.t
160 160 test-eolfilename.t
161 161 test-excessive-merge.t
162 162 test-exchange-obsmarkers-case-A1.t
163 163 test-exchange-obsmarkers-case-A2.t
164 164 test-exchange-obsmarkers-case-A3.t
165 165 test-exchange-obsmarkers-case-A4.t
166 166 test-exchange-obsmarkers-case-A5.t
167 167 test-exchange-obsmarkers-case-A6.t
168 168 test-exchange-obsmarkers-case-A7.t
169 169 test-exchange-obsmarkers-case-B1.t
170 170 test-exchange-obsmarkers-case-B2.t
171 171 test-exchange-obsmarkers-case-B3.t
172 172 test-exchange-obsmarkers-case-B4.t
173 173 test-exchange-obsmarkers-case-B5.t
174 174 test-exchange-obsmarkers-case-B6.t
175 175 test-exchange-obsmarkers-case-B7.t
176 176 test-exchange-obsmarkers-case-C1.t
177 177 test-exchange-obsmarkers-case-C2.t
178 178 test-exchange-obsmarkers-case-C3.t
179 179 test-exchange-obsmarkers-case-C4.t
180 180 test-exchange-obsmarkers-case-D1.t
181 181 test-exchange-obsmarkers-case-D2.t
182 182 test-exchange-obsmarkers-case-D3.t
183 183 test-exchange-obsmarkers-case-D4.t
184 184 test-execute-bit.t
185 185 test-export.t
186 186 test-extdata.t
187 187 test-extdiff.t
188 188 test-extensions-afterloaded.t
189 189 test-extensions-wrapfunction.py
190 190 test-extra-filelog-entry.t
191 191 test-fetch.t
192 192 test-filebranch.t
193 193 test-filecache.py
194 194 test-filelog.py
195 195 test-fileset-generated.t
196 196 test-fileset.t
197 197 test-fix-topology.t
198 198 test-flags.t
199 199 test-generaldelta.t
200 200 test-getbundle.t
201 201 test-git-export.t
202 202 test-globalopts.t
203 203 test-glog-beautifygraph.t
204 204 test-glog-topological.t
205 205 test-glog.t
206 206 test-gpg.t
207 207 test-graft.t
208 208 test-grep.t
209 209 test-hg-parseurl.py
210 210 test-hghave.t
211 211 test-hgignore.t
212 212 test-hgk.t
213 213 test-hgrc.t
214 214 test-hgweb-annotate-whitespace.t
215 215 test-hgweb-bundle.t
216 216 test-hgweb-csp.t
217 217 test-hgweb-descend-empties.t
218 218 test-hgweb-diffs.t
219 219 test-hgweb-empty.t
220 220 test-hgweb-filelog.t
221 221 test-hgweb-non-interactive.t
222 222 test-hgweb-raw.t
223 223 test-hgweb-removed.t
224 224 test-hgweb.t
225 225 test-hgwebdir-paths.py
226 226 test-hgwebdirsym.t
227 227 test-histedit-arguments.t
228 228 test-histedit-base.t
229 229 test-histedit-bookmark-motion.t
230 230 test-histedit-commute.t
231 231 test-histedit-drop.t
232 232 test-histedit-edit.t
233 233 test-histedit-fold-non-commute.t
234 234 test-histedit-fold.t
235 235 test-histedit-no-backup.t
236 236 test-histedit-no-change.t
237 237 test-histedit-non-commute-abort.t
238 238 test-histedit-non-commute.t
239 239 test-histedit-obsolete.t
240 240 test-histedit-outgoing.t
241 241 test-histedit-templates.t
242 242 test-http-branchmap.t
243 243 test-http-bundle1.t
244 244 test-http-clone-r.t
245 245 test-http-permissions.t
246 246 test-http.t
247 247 test-hybridencode.py
248 248 test-i18n.t
249 249 test-identify.t
250 250 test-impexp-branch.t
251 251 test-import-bypass.t
252 252 test-import-eol.t
253 253 test-import-merge.t
254 254 test-import-unknown.t
255 255 test-import.t
256 256 test-imports-checker.t
257 257 test-incoming-outgoing.t
258 test-infinitepush-bundlestore.t
259 test-infinitepush-ci.t
260 test-infinitepush.t
258 261 test-inherit-mode.t
259 262 test-init.t
260 263 test-issue1089.t
261 264 test-issue1102.t
262 265 test-issue1175.t
263 266 test-issue1306.t
264 267 test-issue1438.t
265 268 test-issue1502.t
266 269 test-issue1802.t
267 270 test-issue1877.t
268 271 test-issue1993.t
269 272 test-issue2137.t
270 273 test-issue3084.t
271 274 test-issue4074.t
272 275 test-issue522.t
273 276 test-issue586.t
274 277 test-issue5979.t
275 278 test-issue612.t
276 279 test-issue619.t
277 280 test-issue660.t
278 281 test-issue672.t
279 282 test-issue842.t
280 283 test-journal-exists.t
281 284 test-journal-share.t
282 285 test-journal.t
283 286 test-known.t
284 287 test-largefiles-cache.t
285 288 test-largefiles-misc.t
286 289 test-largefiles-small-disk.t
287 290 test-largefiles-update.t
288 291 test-largefiles.t
289 292 test-lfs-largefiles.t
290 293 test-lfs-pointer.py
291 294 test-linelog.py
292 295 test-linerange.py
293 296 test-locate.t
294 297 test-lock-badness.t
295 298 test-log-linerange.t
296 299 test-log.t
297 300 test-logexchange.t
298 301 test-lrucachedict.py
299 302 test-mactext.t
300 303 test-mailmap.t
301 304 test-manifest-merging.t
302 305 test-manifest.py
303 306 test-manifest.t
304 307 test-match.py
305 308 test-mdiff.py
306 309 test-merge-changedelete.t
307 310 test-merge-closedheads.t
308 311 test-merge-commit.t
309 312 test-merge-criss-cross.t
310 313 test-merge-default.t
311 314 test-merge-force.t
312 315 test-merge-halt.t
313 316 test-merge-internal-tools-pattern.t
314 317 test-merge-local.t
315 318 test-merge-no-file-change.t
316 319 test-merge-remove.t
317 320 test-merge-revert.t
318 321 test-merge-revert2.t
319 322 test-merge-subrepos.t
320 323 test-merge-symlinks.t
321 324 test-merge-tools.t
322 325 test-merge-types.t
323 326 test-merge1.t
324 327 test-merge10.t
325 328 test-merge2.t
326 329 test-merge4.t
327 330 test-merge5.t
328 331 test-merge6.t
329 332 test-merge7.t
330 333 test-merge8.t
331 334 test-merge9.t
332 335 test-minifileset.py
333 336 test-minirst.py
334 337 test-mq-git.t
335 338 test-mq-guards.t
336 339 test-mq-header-date.t
337 340 test-mq-header-from.t
338 341 test-mq-merge.t
339 342 test-mq-pull-from-bundle.t
340 343 test-mq-qclone-http.t
341 344 test-mq-qdelete.t
342 345 test-mq-qdiff.t
343 346 test-mq-qfold.t
344 347 test-mq-qgoto.t
345 348 test-mq-qimport-fail-cleanup.t
346 349 test-mq-qnew.t
347 350 test-mq-qpush-exact.t
348 351 test-mq-qpush-fail.t
349 352 test-mq-qqueue.t
350 353 test-mq-qrefresh-interactive.t
351 354 test-mq-qrefresh-replace-log-message.t
352 355 test-mq-qrefresh.t
353 356 test-mq-qrename.t
354 357 test-mq-qsave.t
355 358 test-mq-safety.t
356 359 test-mq-subrepo.t
357 360 test-mq-symlinks.t
358 361 test-mq.t
359 362 test-mv-cp-st-diff.t
360 363 test-narrow-acl.t
361 364 test-narrow-archive.t
362 365 test-narrow-clone-no-ellipsis.t
363 366 test-narrow-clone-non-narrow-server.t
364 367 test-narrow-clone-nonlinear.t
365 368 test-narrow-clone.t
366 369 test-narrow-commit.t
367 370 test-narrow-copies.t
368 371 test-narrow-debugcommands.t
369 372 test-narrow-debugrebuilddirstate.t
370 373 test-narrow-exchange-merges.t
371 374 test-narrow-exchange.t
372 375 test-narrow-expanddirstate.t
373 376 test-narrow-merge.t
374 377 test-narrow-patch.t
375 378 test-narrow-patterns.t
376 379 test-narrow-pull.t
377 380 test-narrow-rebase.t
378 381 test-narrow-shallow-merges.t
379 382 test-narrow-shallow.t
380 383 test-narrow-strip.t
381 384 test-narrow-trackedcmd.t
382 385 test-narrow-update.t
383 386 test-narrow-widen-no-ellipsis.t
384 387 test-narrow-widen.t
385 388 test-narrow.t
386 389 test-nested-repo.t
387 390 test-newbranch.t
388 391 test-newercgi.t
389 392 test-nointerrupt.t
390 393 test-obshistory.t
391 394 test-obsmarker-template.t
392 395 test-obsmarkers-effectflag.t
393 396 test-obsolete-bounds-checking.t
394 397 test-obsolete-bundle-strip.t
395 398 test-obsolete-changeset-exchange.t
396 399 test-obsolete-checkheads.t
397 400 test-obsolete-distributed.t
398 401 test-obsolete-divergent.t
399 402 test-obsolete-tag-cache.t
400 403 test-obsolete.t
401 404 test-origbackup-conflict.t
402 405 test-pager-legacy.t
403 406 test-pager.t
404 407 test-parents.t
405 408 test-parseindex2.py
406 409 test-patch-offset.t
407 410 test-patch.t
408 411 test-patchbomb-bookmark.t
409 412 test-patchbomb-tls.t
410 413 test-patchbomb.t
411 414 test-pathconflicts-basic.t
412 415 test-pathconflicts-merge.t
413 416 test-pathconflicts-update.t
414 417 test-pathencode.py
415 418 test-pending.t
416 419 test-permissions.t
417 420 test-phases-exchange.t
418 421 test-phases.t
419 422 test-profile.t
420 423 test-progress.t
421 424 test-pull-branch.t
422 425 test-pull-http.t
423 426 test-pull-permission.t
424 427 test-pull-pull-corruption.t
425 428 test-pull-r.t
426 429 test-pull-update.t
427 430 test-pull.t
428 431 test-purge.t
429 432 test-push-cgi.t
430 433 test-push-checkheads-partial-C1.t
431 434 test-push-checkheads-partial-C2.t
432 435 test-push-checkheads-partial-C3.t
433 436 test-push-checkheads-partial-C4.t
434 437 test-push-checkheads-pruned-B1.t
435 438 test-push-checkheads-pruned-B2.t
436 439 test-push-checkheads-pruned-B3.t
437 440 test-push-checkheads-pruned-B4.t
438 441 test-push-checkheads-pruned-B5.t
439 442 test-push-checkheads-pruned-B6.t
440 443 test-push-checkheads-pruned-B7.t
441 444 test-push-checkheads-pruned-B8.t
442 445 test-push-checkheads-superceed-A1.t
443 446 test-push-checkheads-superceed-A2.t
444 447 test-push-checkheads-superceed-A3.t
445 448 test-push-checkheads-superceed-A4.t
446 449 test-push-checkheads-superceed-A5.t
447 450 test-push-checkheads-superceed-A6.t
448 451 test-push-checkheads-superceed-A7.t
449 452 test-push-checkheads-superceed-A8.t
450 453 test-push-checkheads-unpushed-D1.t
451 454 test-push-checkheads-unpushed-D2.t
452 455 test-push-checkheads-unpushed-D3.t
453 456 test-push-checkheads-unpushed-D4.t
454 457 test-push-checkheads-unpushed-D5.t
455 458 test-push-checkheads-unpushed-D6.t
456 459 test-push-checkheads-unpushed-D7.t
457 460 test-push-http.t
458 461 test-push-warn.t
459 462 test-push.t
460 463 test-pushvars.t
461 464 test-qrecord.t
462 465 test-rebase-abort.t
463 466 test-rebase-backup.t
464 467 test-rebase-base-flag.t
465 468 test-rebase-bookmarks.t
466 469 test-rebase-brute-force.t
467 470 test-rebase-cache.t
468 471 test-rebase-check-restore.t
469 472 test-rebase-collapse.t
470 473 test-rebase-conflicts.t
471 474 test-rebase-dest.t
472 475 test-rebase-detach.t
473 476 test-rebase-emptycommit.t
474 477 test-rebase-inmemory.t
475 478 test-rebase-interruptions.t
476 479 test-rebase-issue-noparam-single-rev.t
477 480 test-rebase-legacy.t
478 481 test-rebase-mq-skip.t
479 482 test-rebase-mq.t
480 483 test-rebase-named-branches.t
481 484 test-rebase-newancestor.t
482 485 test-rebase-obsolete.t
483 486 test-rebase-parameters.t
484 487 test-rebase-partial.t
485 488 test-rebase-pull.t
486 489 test-rebase-rename.t
487 490 test-rebase-scenario-global.t
488 491 test-rebase-templates.t
489 492 test-rebase-transaction.t
490 493 test-rebuildstate.t
491 494 test-record.t
492 495 test-releasenotes-formatting.t
493 496 test-releasenotes-merging.t
494 497 test-releasenotes-parsing.t
495 498 test-relink.t
496 499 test-remove.t
497 500 test-removeemptydirs.t
498 501 test-rename-after-merge.t
499 502 test-rename-dir-merge.t
500 503 test-rename-merge1.t
501 504 test-rename-merge2.t
502 505 test-rename.t
503 506 test-repair-strip.t
504 507 test-repo-compengines.t
505 508 test-requires.t
506 509 test-resolve.t
507 510 test-revert-flags.t
508 511 test-revert-interactive.t
509 512 test-revert-unknown.t
510 513 test-revert.t
511 514 test-revisions.t
512 515 test-revlog-ancestry.py
513 516 test-revlog-group-emptyiter.t
514 517 test-revlog-mmapindex.t
515 518 test-revlog-packentry.t
516 519 test-revlog-raw.py
517 520 test-revlog-v2.t
518 521 test-revset-dirstate-parents.t
519 522 test-revset-legacy-lookup.t
520 523 test-revset-outgoing.t
521 524 test-rollback.t
522 525 test-run-tests.py
523 526 test-run-tests.t
524 527 test-schemes.t
525 528 test-serve.t
526 529 test-setdiscovery.t
527 530 test-share.t
528 531 test-shelve.t
529 532 test-show-stack.t
530 533 test-show-work.t
531 534 test-show.t
532 535 test-simple-update.t
533 536 test-simplekeyvaluefile.py
534 537 test-simplemerge.py
535 538 test-single-head.t
536 539 test-sparse-clear.t
537 540 test-sparse-clone.t
538 541 test-sparse-import.t
539 542 test-sparse-merges.t
540 543 test-sparse-profiles.t
541 544 test-sparse-requirement.t
542 545 test-sparse-verbose-json.t
543 546 test-sparse.t
544 547 test-split.t
545 548 test-ssh-bundle1.t
546 549 test-ssh-clone-r.t
547 550 test-ssh-proto-unbundle.t
548 551 test-ssh-proto.t
549 552 test-ssh-repoerror.t
550 553 test-ssh.t
551 554 test-sshserver.py
552 555 test-stack.t
553 556 test-status-color.t
554 557 test-status-inprocess.py
555 558 test-status-rev.t
556 559 test-status-terse.t
557 560 test-storage.py
558 561 test-stream-bundle-v2.t
559 562 test-strict.t
560 563 test-strip-cross.t
561 564 test-strip.t
562 565 test-subrepo-deep-nested-change.t
563 566 test-subrepo-missing.t
564 567 test-subrepo-paths.t
565 568 test-subrepo-recursion.t
566 569 test-subrepo-relative-path.t
567 570 test-subrepo.t
568 571 test-symlink-os-yes-fs-no.py
569 572 test-symlink-placeholder.t
570 573 test-symlinks.t
571 574 test-tag.t
572 575 test-tags.t
573 576 test-template-basic.t
574 577 test-template-functions.t
575 578 test-template-keywords.t
576 579 test-template-map.t
577 580 test-tools.t
578 581 test-transplant.t
579 582 test-treemanifest.t
580 583 test-ui-color.py
581 584 test-ui-config.py
582 585 test-ui-verbosity.py
583 586 test-unamend.t
584 587 test-unbundlehash.t
585 588 test-uncommit.t
586 589 test-unified-test.t
587 590 test-unionrepo.t
588 591 test-unrelated-pull.t
589 592 test-up-local-change.t
590 593 test-update-branches.t
591 594 test-update-dest.t
592 595 test-update-issue1456.t
593 596 test-update-names.t
594 597 test-update-reverse.t
595 598 test-upgrade-repo.t
596 599 test-url-download.t
597 600 test-url-rev.t
598 601 test-url.py
599 602 test-username-newline.t
600 603 test-util.py
601 604 test-verify.t
602 605 test-walk.t
603 606 test-walkrepo.py
604 607 test-websub.t
605 608 test-win32text.t
606 609 test-wireproto-clientreactor.py
607 610 test-wireproto-command-branchmap.t
608 611 test-wireproto-command-changesetdata.t
609 612 test-wireproto-command-filedata.t
610 613 test-wireproto-command-filesdata.t
611 614 test-wireproto-command-heads.t
612 615 test-wireproto-command-listkeys.t
613 616 test-wireproto-command-lookup.t
614 617 test-wireproto-command-manifestdata.t
615 618 test-wireproto-command-pushkey.t
616 619 test-wireproto-framing.py
617 620 test-wireproto-serverreactor.py
618 621 test-wireproto.py
619 622 test-wsgirequest.py
620 623 test-xdg.t
@@ -1,1187 +1,1186 b''
1 1 # Infinite push
2 2 #
3 3 # Copyright 2016 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """ store some pushes in a remote blob store on the server (EXPERIMENTAL)
8 8
9 9 [infinitepush]
10 10 # Server-side and client-side option. Pattern of the infinitepush bookmark
11 11 branchpattern = PATTERN
12 12
13 13 # Server or client
14 14 server = False
15 15
16 16 # Server-side option. Possible values: 'disk' or 'sql'. Fails if not set
17 17 indextype = disk
18 18
19 19 # Server-side option. Used only if indextype=sql.
20 20 # Format: 'IP:PORT:DB_NAME:USER:PASSWORD'
21 21 sqlhost = IP:PORT:DB_NAME:USER:PASSWORD
22 22
23 23 # Server-side option. Used only if indextype=disk.
24 24 # Filesystem path to the index store
25 25 indexpath = PATH
26 26
27 27 # Server-side option. Possible values: 'disk' or 'external'
28 28 # Fails if not set
29 29 storetype = disk
30 30
31 31 # Server-side option.
32 32     # Path to the binary that will save the bundle to the bundlestore
33 33 # Formatted cmd line will be passed to it (see `put_args`)
34 34 put_binary = put
35 35
36 36     # Server-side option. Used only if storetype=external.
37 37 # Format cmd-line string for put binary. Placeholder: {filename}
38 38 put_args = {filename}
39 39
40 40 # Server-side option.
41 41     # Path to the binary that gets the bundle from the bundlestore.
42 42 # Formatted cmd line will be passed to it (see `get_args`)
43 43 get_binary = get
44 44
45 45     # Server-side option. Used only if storetype=external.
46 46 # Format cmd-line string for get binary. Placeholders: {filename} {handle}
47 47 get_args = {filename} {handle}
48 48
49 49 # Server-side option
50 50     logfile = FILE
51 51
52 52 # Server-side option
53 53 loglevel = DEBUG
54 54
55 55 # Server-side option. Used only if indextype=sql.
56 56 # Sets mysql wait_timeout option.
57 57 waittimeout = 300
58 58
59 59 # Server-side option. Used only if indextype=sql.
60 60 # Sets mysql innodb_lock_wait_timeout option.
61 61 locktimeout = 120
62 62
63 63 # Server-side option. Used only if indextype=sql.
64 64 # Name of the repository
65 65 reponame = ''
66 66
67 67 # Client-side option. Used by --list-remote option. List of remote scratch
68 68 # patterns to list if no patterns are specified.
69 69 defaultremotepatterns = ['*']
70 70
71 71 # Instructs infinitepush to forward all received bundle2 parts to the
72 72 # bundle for storage. Defaults to False.
73 73 storeallparts = True
74 74
75 75     # Routes each incoming push to the bundlestore. Defaults to False
76 76 pushtobundlestore = True
77 77
78 78 [remotenames]
79 79 # Client-side option
80 80 # This option should be set only if remotenames extension is enabled.
81 81 # Whether remote bookmarks are tracked by remotenames extension.
82 82 bookmarks = True
83 83 """
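Taken together, a minimal server-side setup using the disk-backed store and index might look like the hgrc fragment below (illustrative only; the branch pattern and index path are placeholders, not values taken from this change):

    [infinitepush]
    server = True
    branchpattern = re:scratch/.*
    storetype = disk
    indextype = disk
    indexpath = /path/to/bundle-index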
84 84
85 85 from __future__ import absolute_import
86 86
87 87 import collections
88 88 import contextlib
89 89 import errno
90 90 import functools
91 91 import logging
92 92 import os
93 93 import random
94 94 import re
95 95 import socket
96 96 import subprocess
97 97 import time
98 98
99 99 from mercurial.node import (
100 100 bin,
101 101 hex,
102 102 )
103 103
104 104 from mercurial.i18n import _
105 105
106 106 from mercurial.utils import (
107 107 procutil,
108 108 stringutil,
109 109 )
110 110
111 111 from mercurial import (
112 112 bundle2,
113 113 changegroup,
114 114 commands,
115 115 discovery,
116 116 encoding,
117 117 error,
118 118 exchange,
119 119 extensions,
120 120 hg,
121 121 localrepo,
122 122 phases,
123 123 pushkey,
124 124 pycompat,
125 125 registrar,
126 126 util,
127 127 wireprototypes,
128 128 wireprotov1peer,
129 129 wireprotov1server,
130 130 )
131 131
132 132 from . import (
133 133 bundleparts,
134 134 common,
135 135 )
136 136
137 137 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
138 138 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
139 139 # be specifying the version(s) of Mercurial they are tested with, or
140 140 # leave the attribute unspecified.
141 141 testedwith = 'ships-with-hg-core'
142 142
143 143 configtable = {}
144 144 configitem = registrar.configitem(configtable)
145 145
146 146 configitem('infinitepush', 'server',
147 147 default=False,
148 148 )
149 149 configitem('infinitepush', 'storetype',
150 150 default='',
151 151 )
152 152 configitem('infinitepush', 'indextype',
153 153 default='',
154 154 )
155 155 configitem('infinitepush', 'indexpath',
156 156 default='',
157 157 )
158 158 configitem('infinitepush', 'storeallparts',
159 159 default=False,
160 160 )
161 161 configitem('infinitepush', 'reponame',
162 162 default='',
163 163 )
164 164 configitem('scratchbranch', 'storepath',
165 165 default='',
166 166 )
167 167 configitem('infinitepush', 'branchpattern',
168 168 default='',
169 169 )
170 170 configitem('infinitepush', 'pushtobundlestore',
171 171 default=False,
172 172 )
173 173 configitem('experimental', 'server-bundlestore-bookmark',
174 174 default='',
175 175 )
176 176 configitem('experimental', 'infinitepush-scratchpush',
177 177 default=False,
178 178 )
179 179
180 180 experimental = 'experimental'
181 181 configbookmark = 'server-bundlestore-bookmark'
182 182 configscratchpush = 'infinitepush-scratchpush'
183 183
184 184 scratchbranchparttype = bundleparts.scratchbranchparttype
185 185 revsetpredicate = registrar.revsetpredicate()
186 186 templatekeyword = registrar.templatekeyword()
187 187 _scratchbranchmatcher = lambda x: False
188 188 _maybehash = re.compile(r'^[a-f0-9]+$').search
189 189
190 190 def _buildexternalbundlestore(ui):
191 191 put_args = ui.configlist('infinitepush', 'put_args', [])
192 192 put_binary = ui.config('infinitepush', 'put_binary')
193 193 if not put_binary:
194 194 raise error.Abort('put binary is not specified')
195 195 get_args = ui.configlist('infinitepush', 'get_args', [])
196 196 get_binary = ui.config('infinitepush', 'get_binary')
197 197 if not get_binary:
198 198 raise error.Abort('get binary is not specified')
199 199 from . import store
200 200 return store.externalbundlestore(put_binary, put_args, get_binary, get_args)
201 201
202 202 def _buildsqlindex(ui):
203 203 sqlhost = ui.config('infinitepush', 'sqlhost')
204 204 if not sqlhost:
205 205 raise error.Abort(_('please set infinitepush.sqlhost'))
206 206 host, port, db, user, password = sqlhost.split(':')
207 207 reponame = ui.config('infinitepush', 'reponame')
208 208 if not reponame:
209 209 raise error.Abort(_('please set infinitepush.reponame'))
210 210
211 211 logfile = ui.config('infinitepush', 'logfile', '')
212 212 waittimeout = ui.configint('infinitepush', 'waittimeout', 300)
213 213 locktimeout = ui.configint('infinitepush', 'locktimeout', 120)
214 214 from . import sqlindexapi
215 215 return sqlindexapi.sqlindexapi(
216 216 reponame, host, port, db, user, password,
217 217 logfile, _getloglevel(ui), waittimeout=waittimeout,
218 218 locktimeout=locktimeout)
219 219
220 220 def _getloglevel(ui):
221 221 loglevel = ui.config('infinitepush', 'loglevel', 'DEBUG')
222 222 numeric_loglevel = getattr(logging, loglevel.upper(), None)
223 223 if not isinstance(numeric_loglevel, int):
224 224 raise error.Abort(_('invalid log level %s') % loglevel)
225 225 return numeric_loglevel
226 226
227 227 def _tryhoist(ui, remotebookmark):
228 228     '''returns a bookmark with the hoisted part removed
229 229
230 230     The remotenames extension has a 'hoist' config that allows remote
231 231     bookmarks to be used without specifying the remote path. For example,
232 232     'hg update master' works as well as 'hg update remote/master'. We want
233 233     to allow the same in infinitepush.
234 234 '''
235 235
236 236 if common.isremotebooksenabled(ui):
237 237 hoist = ui.config('remotenames', 'hoistedpeer') + '/'
238 238 if remotebookmark.startswith(hoist):
239 239 return remotebookmark[len(hoist):]
240 240 return remotebookmark
241 241
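As a concrete illustration of the hoisting above, assuming the remotenames extension is enabled and remotenames.hoistedpeer is set to default (both assumptions made only for this example):

    # ui comes from a repository with remotenames enabled
    _tryhoist(ui, 'default/master')    # -> 'master' (hoist prefix stripped)
    _tryhoist(ui, 'scratch/feature')   # -> 'scratch/feature' (no hoist prefix, returned unchanged)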
242 242 class bundlestore(object):
243 243 def __init__(self, repo):
244 244 self._repo = repo
245 245 storetype = self._repo.ui.config('infinitepush', 'storetype')
246 246 if storetype == 'disk':
247 247 from . import store
248 248 self.store = store.filebundlestore(self._repo.ui, self._repo)
249 249 elif storetype == 'external':
250 250 self.store = _buildexternalbundlestore(self._repo.ui)
251 251 else:
252 252 raise error.Abort(
253 253 _('unknown infinitepush store type specified %s') % storetype)
254 254
255 255 indextype = self._repo.ui.config('infinitepush', 'indextype')
256 256 if indextype == 'disk':
257 257 from . import fileindexapi
258 258 self.index = fileindexapi.fileindexapi(self._repo)
259 259 elif indextype == 'sql':
260 260 self.index = _buildsqlindex(self._repo.ui)
261 261 else:
262 262 raise error.Abort(
263 263 _('unknown infinitepush index type specified %s') % indextype)
264 264
265 265 def _isserver(ui):
266 266 return ui.configbool('infinitepush', 'server')
267 267
268 268 def reposetup(ui, repo):
269 269 if _isserver(ui) and repo.local():
270 270 repo.bundlestore = bundlestore(repo)
271 271
272 272 def extsetup(ui):
273 273 commonsetup(ui)
274 274 if _isserver(ui):
275 275 serverextsetup(ui)
276 276 else:
277 277 clientextsetup(ui)
278 278
279 279 def commonsetup(ui):
280 280 wireprotov1server.commands['listkeyspatterns'] = (
281 281 wireprotolistkeyspatterns, 'namespace patterns')
282 282 scratchbranchpat = ui.config('infinitepush', 'branchpattern')
283 283 if scratchbranchpat:
284 284 global _scratchbranchmatcher
285 285 kind, pat, _scratchbranchmatcher = \
286 286 stringutil.stringmatcher(scratchbranchpat)
287 287
288 288 def serverextsetup(ui):
289 289 origpushkeyhandler = bundle2.parthandlermapping['pushkey']
290 290
291 291 def newpushkeyhandler(*args, **kwargs):
292 292 bundle2pushkey(origpushkeyhandler, *args, **kwargs)
293 293 newpushkeyhandler.params = origpushkeyhandler.params
294 294 bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
295 295
296 296 orighandlephasehandler = bundle2.parthandlermapping['phase-heads']
297 297 newphaseheadshandler = lambda *args, **kwargs: \
298 298 bundle2handlephases(orighandlephasehandler, *args, **kwargs)
299 299 newphaseheadshandler.params = orighandlephasehandler.params
300 300 bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler
301 301
302 302 extensions.wrapfunction(localrepo.localrepository, 'listkeys',
303 303 localrepolistkeys)
304 304 wireprotov1server.commands['lookup'] = (
305 305 _lookupwrap(wireprotov1server.commands['lookup'][0]), 'key')
306 306 extensions.wrapfunction(exchange, 'getbundlechunks', getbundlechunks)
307 307
308 308 extensions.wrapfunction(bundle2, 'processparts', processparts)
309 309
310 310 def clientextsetup(ui):
311 311 entry = extensions.wrapcommand(commands.table, 'push', _push)
312 312
313 313 entry[1].append(
314 314 ('', 'bundle-store', None,
315 315 _('force push to go to bundle store (EXPERIMENTAL)')))
316 316
317 317 extensions.wrapcommand(commands.table, 'pull', _pull)
318 318
319 319 extensions.wrapfunction(discovery, 'checkheads', _checkheads)
320 320
321 321 wireprotov1peer.wirepeer.listkeyspatterns = listkeyspatterns
322 322
323 323 partorder = exchange.b2partsgenorder
324 324 index = partorder.index('changeset')
325 325 partorder.insert(
326 326 index, partorder.pop(partorder.index(scratchbranchparttype)))
327 327
328 328 def _checkheads(orig, pushop):
329 329 if pushop.ui.configbool(experimental, configscratchpush, False):
330 330 return
331 331 return orig(pushop)
332 332
333 333 def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
334 334 patterns = wireprototypes.decodelist(patterns)
335 335 d = repo.listkeys(encoding.tolocal(namespace), patterns).iteritems()
336 336 return pushkey.encodekeys(d)
337 337
338 338 def localrepolistkeys(orig, self, namespace, patterns=None):
339 339 if namespace == 'bookmarks' and patterns:
340 340 index = self.bundlestore.index
341 341 results = {}
342 342 bookmarks = orig(self, namespace)
343 343 for pattern in patterns:
344 344 results.update(index.getbookmarks(pattern))
345 345 if pattern.endswith('*'):
346 346 pattern = 're:^' + pattern[:-1] + '.*'
347 347 kind, pat, matcher = stringutil.stringmatcher(pattern)
348 348 for bookmark, node in bookmarks.iteritems():
349 349 if matcher(bookmark):
350 350 results[bookmark] = node
351 351 return results
352 352 else:
353 353 return orig(self, namespace)
354 354
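In the bookmark branch of localrepolistkeys above, a pattern ending in '*' is rewritten into a regular expression before being handed to stringutil.stringmatcher (imported from mercurial.utils at the top of this file). A small hedged illustration with invented bookmark names:

    pattern = 'scratch/joe/*'
    if pattern.endswith('*'):
        pattern = 're:^' + pattern[:-1] + '.*'   # -> 're:^scratch/joe/.*'
    kind, pat, matcher = stringutil.stringmatcher(pattern)
    matcher('scratch/joe/feature')    # truthy: the bookmark matches the pattern
    matcher('scratch/anne/feature')   # falsy: no match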
355 355 @wireprotov1peer.batchable
356 356 def listkeyspatterns(self, namespace, patterns):
357 357 if not self.capable('pushkey'):
358 358 yield {}, None
359 359 f = wireprotov1peer.future()
360 self.ui.debug('preparing listkeys for "%s" with pattern "%s"\n' %
361 (namespace, patterns))
360 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
362 361 yield {
363 362 'namespace': encoding.fromlocal(namespace),
364 363 'patterns': wireprototypes.encodelist(patterns)
365 364 }, f
366 365 d = f.value
367 366 self.ui.debug('received listkey for "%s": %i bytes\n'
368 367 % (namespace, len(d)))
369 368 yield pushkey.decodekeys(d)
370 369
371 370 def _readbundlerevs(bundlerepo):
372 371 return list(bundlerepo.revs('bundle()'))
373 372
374 373 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
375 374     '''Tells remotefilelog to include all changed files in the changegroup
376 375
377 376     By default remotefilelog doesn't include file content in the changegroup.
378 377     But we need to include it if we are fetching from the bundlestore.
379 378 '''
380 379 changedfiles = set()
381 380 cl = bundlerepo.changelog
382 381 for r in bundlerevs:
383 382 # [3] means changed files
384 383 changedfiles.update(cl.read(r)[3])
385 384 if not changedfiles:
386 385 return bundlecaps
387 386
388 387 changedfiles = '\0'.join(changedfiles)
389 388 newcaps = []
390 389 appended = False
391 390 for cap in (bundlecaps or []):
392 391 if cap.startswith('excludepattern='):
393 392 newcaps.append('\0'.join((cap, changedfiles)))
394 393 appended = True
395 394 else:
396 395 newcaps.append(cap)
397 396 if not appended:
398 397         # No excludepattern cap was found. Just append it
399 398 newcaps.append('excludepattern=' + changedfiles)
400 399
401 400 return newcaps
402 401
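To make the capability handling above concrete: the changed file names are NUL-joined into one string that either extends an existing excludepattern capability or is appended as a new one. A hedged sketch with invented file names:

    changedfiles = '\0'.join(['a.txt', 'dir/b.txt'])
    newcap = 'excludepattern=' + changedfiles
    # newcap == 'excludepattern=a.txt\x00dir/b.txt'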
403 402 def _rebundle(bundlerepo, bundleroots, unknownhead):
404 403 '''
405 404     The bundle may include more revisions than the user requested. For example,
406 405     the user may ask for one revision while the bundle also contains its descendants.
407 406     This function filters out all revisions that the user did not request.
408 407 '''
409 408 parts = []
410 409
411 410 version = '02'
412 411 outgoing = discovery.outgoing(bundlerepo, commonheads=bundleroots,
413 412 missingheads=[unknownhead])
414 413 cgstream = changegroup.makestream(bundlerepo, outgoing, version, 'pull')
415 414 cgstream = util.chunkbuffer(cgstream).read()
416 415 cgpart = bundle2.bundlepart('changegroup', data=cgstream)
417 416 cgpart.addparam('version', version)
418 417 parts.append(cgpart)
419 418
420 419 return parts
421 420
422 421 def _getbundleroots(oldrepo, bundlerepo, bundlerevs):
423 422 cl = bundlerepo.changelog
424 423 bundleroots = []
425 424 for rev in bundlerevs:
426 425 node = cl.node(rev)
427 426 parents = cl.parents(node)
428 427 for parent in parents:
429 428 # include all revs that exist in the main repo
430 429             # to make sure that the bundle can be applied client-side
431 430 if parent in oldrepo:
432 431 bundleroots.append(parent)
433 432 return bundleroots
434 433
435 434 def _needsrebundling(head, bundlerepo):
436 435 bundleheads = list(bundlerepo.revs('heads(bundle())'))
437 436 return not (len(bundleheads) == 1 and
438 437 bundlerepo[bundleheads[0]].node() == head)
439 438
440 439 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
441 440     '''generates the bundle that will be sent to the user
442 441
443 442     returns a list of bundle2 parts
444 443 '''
445 444 parts = []
446 445 if not _needsrebundling(head, bundlerepo):
447 446 with util.posixfile(bundlefile, "rb") as f:
448 447 unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile)
449 448 if isinstance(unbundler, changegroup.cg1unpacker):
450 449 part = bundle2.bundlepart('changegroup',
451 450 data=unbundler._stream.read())
452 451 part.addparam('version', '01')
453 452 parts.append(part)
454 453 elif isinstance(unbundler, bundle2.unbundle20):
455 454 haschangegroup = False
456 455 for part in unbundler.iterparts():
457 456 if part.type == 'changegroup':
458 457 haschangegroup = True
459 458 newpart = bundle2.bundlepart(part.type, data=part.read())
460 459 for key, value in part.params.iteritems():
461 460 newpart.addparam(key, value)
462 461 parts.append(newpart)
463 462
464 463 if not haschangegroup:
465 464 raise error.Abort(
466 465 'unexpected bundle without changegroup part, ' +
467 466 'head: %s' % hex(head),
468 467 hint='report to administrator')
469 468 else:
470 469 raise error.Abort('unknown bundle type')
471 470 else:
472 471 parts = _rebundle(bundlerepo, bundleroots, head)
473 472
474 473 return parts
475 474
476 475 def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs):
477 476 heads = heads or []
478 477 # newheads are parents of roots of scratch bundles that were requested
479 478 newphases = {}
480 479 scratchbundles = []
481 480 newheads = []
482 481 scratchheads = []
483 482 nodestobundle = {}
484 483 allbundlestocleanup = []
485 484 try:
486 485 for head in heads:
487 486 if head not in repo.changelog.nodemap:
488 487 if head not in nodestobundle:
489 488 newbundlefile = common.downloadbundle(repo, head)
490 489 bundlepath = "bundle:%s+%s" % (repo.root, newbundlefile)
491 490 bundlerepo = hg.repository(repo.ui, bundlepath)
492 491
493 492 allbundlestocleanup.append((bundlerepo, newbundlefile))
494 493 bundlerevs = set(_readbundlerevs(bundlerepo))
495 494 bundlecaps = _includefilelogstobundle(
496 495 bundlecaps, bundlerepo, bundlerevs, repo.ui)
497 496 cl = bundlerepo.changelog
498 497 bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs)
499 498 for rev in bundlerevs:
500 499 node = cl.node(rev)
501 500 newphases[hex(node)] = str(phases.draft)
502 501 nodestobundle[node] = (bundlerepo, bundleroots,
503 502 newbundlefile)
504 503
505 504 scratchbundles.append(
506 505 _generateoutputparts(head, *nodestobundle[head]))
507 506 newheads.extend(bundleroots)
508 507 scratchheads.append(head)
509 508 finally:
510 509 for bundlerepo, bundlefile in allbundlestocleanup:
511 510 bundlerepo.close()
512 511 try:
513 512 os.unlink(bundlefile)
514 513 except (IOError, OSError):
515 514                 # if we can't clean up the file then just ignore the error,
516 515 # no need to fail
517 516 pass
518 517
519 518 pullfrombundlestore = bool(scratchbundles)
520 519 wrappedchangegrouppart = False
521 520 wrappedlistkeys = False
522 521 oldchangegrouppart = exchange.getbundle2partsmapping['changegroup']
523 522 try:
524 523 def _changegrouppart(bundler, *args, **kwargs):
525 524             # Order is important here. First add the non-scratch part
526 525             # and only then add parts with scratch bundles, because the
527 526             # non-scratch part contains parents of roots of scratch bundles.
528 527 result = oldchangegrouppart(bundler, *args, **kwargs)
529 528 for bundle in scratchbundles:
530 529 for part in bundle:
531 530 bundler.addpart(part)
532 531 return result
533 532
534 533 exchange.getbundle2partsmapping['changegroup'] = _changegrouppart
535 534 wrappedchangegrouppart = True
536 535
537 536 def _listkeys(orig, self, namespace):
538 537 origvalues = orig(self, namespace)
539 538 if namespace == 'phases' and pullfrombundlestore:
540 539 if origvalues.get('publishing') == 'True':
541 540 # Make repo non-publishing to preserve draft phase
542 541 del origvalues['publishing']
543 542 origvalues.update(newphases)
544 543 return origvalues
545 544
546 545 extensions.wrapfunction(localrepo.localrepository, 'listkeys',
547 546 _listkeys)
548 547 wrappedlistkeys = True
549 548 heads = list((set(newheads) | set(heads)) - set(scratchheads))
550 549 result = orig(repo, source, heads=heads,
551 550 bundlecaps=bundlecaps, **kwargs)
552 551 finally:
553 552 if wrappedchangegrouppart:
554 553 exchange.getbundle2partsmapping['changegroup'] = oldchangegrouppart
555 554 if wrappedlistkeys:
556 555 extensions.unwrapfunction(localrepo.localrepository, 'listkeys',
557 556 _listkeys)
558 557 return result
559 558
560 559 def _lookupwrap(orig):
561 560 def _lookup(repo, proto, key):
562 561 localkey = encoding.tolocal(key)
563 562
564 563 if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
565 564 scratchnode = repo.bundlestore.index.getnode(localkey)
566 565 if scratchnode:
567 566 return "%d %s\n" % (1, scratchnode)
568 567 else:
569 568 return "%d %s\n" % (0, 'scratch branch %s not found' % localkey)
570 569 else:
571 570 try:
572 571 r = hex(repo.lookup(localkey))
573 572 return "%d %s\n" % (1, r)
574 573 except Exception as inst:
575 574 if repo.bundlestore.index.getbundle(localkey):
576 575 return "%d %s\n" % (1, localkey)
577 576 else:
578 577 r = stringutil.forcebytestr(inst)
579 578 return "%d %s\n" % (0, r)
580 579 return _lookup
581 580
582 581 def _pull(orig, ui, repo, source="default", **opts):
583 582 opts = pycompat.byteskwargs(opts)
584 583 # Copy paste from `pull` command
585 584 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
586 585
587 586 scratchbookmarks = {}
588 587 unfi = repo.unfiltered()
589 588 unknownnodes = []
590 589 for rev in opts.get('rev', []):
591 590 if rev not in unfi:
592 591 unknownnodes.append(rev)
593 592 if opts.get('bookmark'):
594 593 bookmarks = []
595 594 revs = opts.get('rev') or []
596 595 for bookmark in opts.get('bookmark'):
597 596 if _scratchbranchmatcher(bookmark):
598 597 # rev is not known yet
599 598 # it will be fetched with listkeyspatterns next
600 599 scratchbookmarks[bookmark] = 'REVTOFETCH'
601 600 else:
602 601 bookmarks.append(bookmark)
603 602
604 603 if scratchbookmarks:
605 604 other = hg.peer(repo, opts, source)
606 605 fetchedbookmarks = other.listkeyspatterns(
607 606 'bookmarks', patterns=scratchbookmarks)
608 607 for bookmark in scratchbookmarks:
609 608 if bookmark not in fetchedbookmarks:
610 609 raise error.Abort('remote bookmark %s not found!' %
611 610 bookmark)
612 611 scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
613 612 revs.append(fetchedbookmarks[bookmark])
614 613 opts['bookmark'] = bookmarks
615 614 opts['rev'] = revs
616 615
617 616 if scratchbookmarks or unknownnodes:
618 617 # Set anyincoming to True
619 618 extensions.wrapfunction(discovery, 'findcommonincoming',
620 619 _findcommonincoming)
621 620 try:
622 621 # Remote scratch bookmarks will be deleted because remotenames doesn't
623 622         # know about them. Let's save them before the pull and restore them after
624 623 remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, source)
625 624 result = orig(ui, repo, source, **pycompat.strkwargs(opts))
626 625         # TODO(stash): a race condition is possible
627 626         # if a scratch bookmark was updated right after orig.
628 627 # But that's unlikely and shouldn't be harmful.
629 628 if common.isremotebooksenabled(ui):
630 629 remotescratchbookmarks.update(scratchbookmarks)
631 630 _saveremotebookmarks(repo, remotescratchbookmarks, source)
632 631 else:
633 632 _savelocalbookmarks(repo, scratchbookmarks)
634 633 return result
635 634 finally:
636 635 if scratchbookmarks:
637 636 extensions.unwrapfunction(discovery, 'findcommonincoming')
638 637
639 638 def _readscratchremotebookmarks(ui, repo, other):
640 639 if common.isremotebooksenabled(ui):
641 640 remotenamesext = extensions.find('remotenames')
642 641 remotepath = remotenamesext.activepath(repo.ui, other)
643 642 result = {}
644 643 # Let's refresh remotenames to make sure we have it up to date
645 644 # Seems that `repo.names['remotebookmarks']` may return stale bookmarks
646 645 # and it results in deleting scratch bookmarks. Our best guess how to
647 646 # fix it is to use `clearnames()`
648 647 repo._remotenames.clearnames()
649 648 for remotebookmark in repo.names['remotebookmarks'].listnames(repo):
650 649 path, bookname = remotenamesext.splitremotename(remotebookmark)
651 650 if path == remotepath and _scratchbranchmatcher(bookname):
652 651 nodes = repo.names['remotebookmarks'].nodes(repo,
653 652 remotebookmark)
654 653 if nodes:
655 654 result[bookname] = hex(nodes[0])
656 655 return result
657 656 else:
658 657 return {}
659 658
660 659 def _saveremotebookmarks(repo, newbookmarks, remote):
661 660 remotenamesext = extensions.find('remotenames')
662 661 remotepath = remotenamesext.activepath(repo.ui, remote)
663 662 branches = collections.defaultdict(list)
664 663 bookmarks = {}
665 664 remotenames = remotenamesext.readremotenames(repo)
666 665 for hexnode, nametype, remote, rname in remotenames:
667 666 if remote != remotepath:
668 667 continue
669 668 if nametype == 'bookmarks':
670 669 if rname in newbookmarks:
671 670                 # This is possible if we have a normal bookmark that matches
672 671                 # the scratch branch pattern. In this case just use the current
673 672                 # bookmark node
674 673 del newbookmarks[rname]
675 674 bookmarks[rname] = hexnode
676 675 elif nametype == 'branches':
677 676 # saveremotenames expects 20 byte binary nodes for branches
678 677 branches[rname].append(bin(hexnode))
679 678
680 679 for bookmark, hexnode in newbookmarks.iteritems():
681 680 bookmarks[bookmark] = hexnode
682 681 remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
683 682
684 683 def _savelocalbookmarks(repo, bookmarks):
685 684 if not bookmarks:
686 685 return
687 686 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
688 687 changes = []
689 688 for scratchbook, node in bookmarks.iteritems():
690 689 changectx = repo[node]
691 690 changes.append((scratchbook, changectx.node()))
692 691 repo._bookmarks.applychanges(repo, tr, changes)
693 692
694 693 def _findcommonincoming(orig, *args, **kwargs):
695 694 common, inc, remoteheads = orig(*args, **kwargs)
696 695 return common, True, remoteheads
697 696
698 697 def _push(orig, ui, repo, dest=None, *args, **opts):
699
700 bookmark = opts.get(r'bookmark')
698 opts = pycompat.byteskwargs(opts)
699 bookmark = opts.get('bookmark')
701 700 # we only support pushing one infinitepush bookmark at once
702 701 if len(bookmark) == 1:
703 702 bookmark = bookmark[0]
704 703 else:
705 704 bookmark = ''
706 705
707 706 oldphasemove = None
708 707 overrides = {(experimental, configbookmark): bookmark}
709 708
710 709 with ui.configoverride(overrides, 'infinitepush'):
711 710 scratchpush = opts.get('bundle_store')
712 711 if _scratchbranchmatcher(bookmark):
713 712 scratchpush = True
714 713 # bundle2 can be sent back after push (for example, bundle2
715 714 # containing `pushkey` part to update bookmarks)
716 715 ui.setconfig(experimental, 'bundle2.pushback', True)
717 716
718 717 if scratchpush:
719 718             # this is an infinitepush; we don't want the bookmark to be applied,
720 719             # rather it should be stored in the bundlestore
721 opts[r'bookmark'] = []
720 opts['bookmark'] = []
722 721 ui.setconfig(experimental, configscratchpush, True)
723 722 oldphasemove = extensions.wrapfunction(exchange,
724 723 '_localphasemove',
725 724 _phasemove)
726 725 # Copy-paste from `push` command
727 726 path = ui.paths.getpath(dest, default=('default-push', 'default'))
728 727 if not path:
729 728 raise error.Abort(_('default repository not configured!'),
730 729 hint=_("see 'hg help config.paths'"))
731 730 destpath = path.pushloc or path.loc
732 731 # Remote scratch bookmarks will be deleted because remotenames doesn't
733 732         # know about them. Let's save them before the push and restore them after
734 733 remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath)
735 result = orig(ui, repo, dest, *args, **opts)
734 result = orig(ui, repo, dest, *args, **pycompat.strkwargs(opts))
736 735 if common.isremotebooksenabled(ui):
737 736 if bookmark and scratchpush:
738 737 other = hg.peer(repo, opts, destpath)
739 738 fetchedbookmarks = other.listkeyspatterns('bookmarks',
740 739 patterns=[bookmark])
741 740 remotescratchbookmarks.update(fetchedbookmarks)
742 741 _saveremotebookmarks(repo, remotescratchbookmarks, destpath)
743 742 if oldphasemove:
744 743 exchange._localphasemove = oldphasemove
745 744 return result
746 745
747 746 def _deleteinfinitepushbookmarks(ui, repo, path, names):
748 747 """Prune remote names by removing the bookmarks we don't want anymore,
749 748 then writing the result back to disk
750 749 """
751 750 remotenamesext = extensions.find('remotenames')
752 751
753 752 # remotename format is:
754 753 # (node, nametype ("branches" or "bookmarks"), remote, name)
755 754 nametype_idx = 1
756 755 remote_idx = 2
757 756 name_idx = 3
758 757 remotenames = [remotename for remotename in \
759 758 remotenamesext.readremotenames(repo) \
760 759 if remotename[remote_idx] == path]
761 760 remote_bm_names = [remotename[name_idx] for remotename in \
762 761 remotenames if remotename[nametype_idx] == "bookmarks"]
763 762
764 763 for name in names:
765 764 if name not in remote_bm_names:
766 765 raise error.Abort(_("infinitepush bookmark '{}' does not exist "
767 766 "in path '{}'").format(name, path))
768 767
769 768 bookmarks = {}
770 769 branches = collections.defaultdict(list)
771 770 for node, nametype, remote, name in remotenames:
772 771 if nametype == "bookmarks" and name not in names:
773 772 bookmarks[name] = node
774 773 elif nametype == "branches":
775 774 # saveremotenames wants binary nodes for branches
776 775 branches[name].append(bin(node))
777 776
778 777 remotenamesext.saveremotenames(repo, path, branches, bookmarks)
779 778
780 779 def _phasemove(orig, pushop, nodes, phase=phases.public):
781 780 """prevent commits from being marked public
782 781
783 782 Since these are going to a scratch branch, they aren't really being
784 783 published."""
785 784
786 785 if phase != phases.public:
787 786 orig(pushop, nodes, phase)
788 787
789 788 @exchange.b2partsgenerator(scratchbranchparttype)
790 789 def partgen(pushop, bundler):
791 790 bookmark = pushop.ui.config(experimental, configbookmark)
792 791 scratchpush = pushop.ui.configbool(experimental, configscratchpush)
793 792 if 'changesets' in pushop.stepsdone or not scratchpush:
794 793 return
795 794
796 795 if scratchbranchparttype not in bundle2.bundle2caps(pushop.remote):
797 796 return
798 797
799 798 pushop.stepsdone.add('changesets')
800 799 if not pushop.outgoing.missing:
801 800 pushop.ui.status(_('no changes found\n'))
802 801 pushop.cgresult = 0
803 802 return
804 803
805 804 # This parameter tells the server that the following bundle is an
806 805     # infinitepush. This lets it switch the part processing to our infinitepush
807 806 # code path.
808 807 bundler.addparam("infinitepush", "True")
809 808
810 809 scratchparts = bundleparts.getscratchbranchparts(pushop.repo,
811 810 pushop.remote,
812 811 pushop.outgoing,
813 812 pushop.ui,
814 813 bookmark)
815 814
816 815 for scratchpart in scratchparts:
817 816 bundler.addpart(scratchpart)
818 817
819 818 def handlereply(op):
820 819 # server either succeeds or aborts; no code to read
821 820 pushop.cgresult = 1
822 821
823 822 return handlereply
824 823
825 824 bundle2.capabilities[bundleparts.scratchbranchparttype] = ()
826 825
827 826 def _getrevs(bundle, oldnode, force, bookmark):
828 827 'extracts and validates the revs to be imported'
829 828 revs = [bundle[r] for r in bundle.revs('sort(bundle())')]
830 829
831 830 # new bookmark
832 831 if oldnode is None:
833 832 return revs
834 833
835 834 # Fast forward update
836 835 if oldnode in bundle and list(bundle.set('bundle() & %s::', oldnode)):
837 836 return revs
838 837
839 838 return revs
840 839
841 840 @contextlib.contextmanager
842 841 def logservicecall(logger, service, **kwargs):
843 842 start = time.time()
844 843 logger(service, eventtype='start', **kwargs)
845 844 try:
846 845 yield
847 846 logger(service, eventtype='success',
848 847 elapsedms=(time.time() - start) * 1000, **kwargs)
849 848 except Exception as e:
850 849 logger(service, eventtype='failure',
851 850 elapsedms=(time.time() - start) * 1000, errormsg=str(e),
852 851 **kwargs)
853 852 raise
854 853
855 854 def _getorcreateinfinitepushlogger(op):
856 855 logger = op.records['infinitepushlogger']
857 856 if not logger:
858 857 ui = op.repo.ui
859 858 try:
860 859 username = procutil.getuser()
861 860 except Exception:
862 861 username = 'unknown'
863 862 # Generate random request id to be able to find all logged entries
864 863 # for the same request. Since requestid is pseudo-generated it may
865 864 # not be unique, but we assume that (hostname, username, requestid)
866 865 # is unique.
867 866 random.seed()
868 867 requestid = random.randint(0, 2000000000)
869 868 hostname = socket.gethostname()
870 869 logger = functools.partial(ui.log, 'infinitepush', user=username,
871 870 requestid=requestid, hostname=hostname,
872 871 reponame=ui.config('infinitepush',
873 872 'reponame'))
874 873 op.records.add('infinitepushlogger', logger)
875 874 else:
876 875 logger = logger[0]
877 876 return logger
878 877
879 878 def storetobundlestore(orig, repo, op, unbundler):
880 879     """stores the incoming bundle from the push command in the bundlestore
881 880     instead of applying it to the revlogs"""
882 881
883 882 repo.ui.status(_("storing changesets on the bundlestore\n"))
884 883 bundler = bundle2.bundle20(repo.ui)
885 884
886 885 # processing each part and storing it in bundler
887 886 with bundle2.partiterator(repo, op, unbundler) as parts:
888 887 for part in parts:
889 888 bundlepart = None
890 889 if part.type == 'replycaps':
891 890 # This configures the current operation to allow reply parts.
892 891 bundle2._processpart(op, part)
893 892 else:
894 893 bundlepart = bundle2.bundlepart(part.type, data=part.read())
895 894 for key, value in part.params.iteritems():
896 895 bundlepart.addparam(key, value)
897 896
898 897 # Certain parts require a response
899 898 if part.type in ('pushkey', 'changegroup'):
900 899 if op.reply is not None:
901 900 rpart = op.reply.newpart('reply:%s' % part.type)
902 rpart.addparam('in-reply-to', str(part.id),
901 rpart.addparam('in-reply-to', b'%d' % part.id,
903 902 mandatory=False)
904 903 rpart.addparam('return', '1', mandatory=False)
905 904
906 905 op.records.add(part.type, {
907 906 'return': 1,
908 907 })
909 908 if bundlepart:
910 909 bundler.addpart(bundlepart)
911 910
912 911 # storing the bundle in the bundlestore
913 912 buf = util.chunkbuffer(bundler.getchunks())
914 913 fd, bundlefile = pycompat.mkstemp()
915 914 try:
916 915 try:
917 916 fp = os.fdopen(fd, r'wb')
918 917 fp.write(buf.read())
919 918 finally:
920 919 fp.close()
921 920 storebundle(op, {}, bundlefile)
922 921 finally:
923 922 try:
924 923 os.unlink(bundlefile)
925 924 except Exception:
926 925 # we would rather see the original exception
927 926 pass
928 927
929 928 def processparts(orig, repo, op, unbundler):
930 929
931 930 # make sure we don't wrap processparts in case of `hg unbundle`
932 931 if op.source == 'unbundle':
933 932 return orig(repo, op, unbundler)
934 933
935 934     # this server routes each push to the bundle store
936 935 if repo.ui.configbool('infinitepush', 'pushtobundlestore'):
937 936 return storetobundlestore(orig, repo, op, unbundler)
938 937
939 938 if unbundler.params.get('infinitepush') != 'True':
940 939 return orig(repo, op, unbundler)
941 940
942 941 handleallparts = repo.ui.configbool('infinitepush', 'storeallparts')
943 942
944 943 bundler = bundle2.bundle20(repo.ui)
945 944 cgparams = None
946 945 with bundle2.partiterator(repo, op, unbundler) as parts:
947 946 for part in parts:
948 947 bundlepart = None
949 948 if part.type == 'replycaps':
950 949 # This configures the current operation to allow reply parts.
951 950 bundle2._processpart(op, part)
952 951 elif part.type == bundleparts.scratchbranchparttype:
953 952 # Scratch branch parts need to be converted to normal
954 953 # changegroup parts, and the extra parameters stored for later
955 954 # when we upload to the store. Eventually those parameters will
956 955 # be put on the actual bundle instead of this part, then we can
957 956 # send a vanilla changegroup instead of the scratchbranch part.
958 957 cgversion = part.params.get('cgversion', '01')
959 958 bundlepart = bundle2.bundlepart('changegroup', data=part.read())
960 959 bundlepart.addparam('version', cgversion)
961 960 cgparams = part.params
962 961
963 962 # If we're not dumping all parts into the new bundle, we need to
964 963 # alert the future pushkey and phase-heads handler to skip
965 964 # the part.
966 965 if not handleallparts:
967 966 op.records.add(scratchbranchparttype + '_skippushkey', True)
968 967 op.records.add(scratchbranchparttype + '_skipphaseheads',
969 968 True)
970 969 else:
971 970 if handleallparts:
972 971 # Ideally we would not process any parts, and instead just
973 972 # forward them to the bundle for storage, but since this
974 973 # differs from previous behavior, we need to put it behind a
975 974 # config flag for incremental rollout.
976 975 bundlepart = bundle2.bundlepart(part.type, data=part.read())
977 976 for key, value in part.params.iteritems():
978 977 bundlepart.addparam(key, value)
979 978
980 979 # Certain parts require a response
981 980 if part.type == 'pushkey':
982 981 if op.reply is not None:
983 982 rpart = op.reply.newpart('reply:pushkey')
984 983 rpart.addparam('in-reply-to', str(part.id),
985 984 mandatory=False)
986 985 rpart.addparam('return', '1', mandatory=False)
987 986 else:
988 987 bundle2._processpart(op, part)
989 988
990 989 if handleallparts:
991 990 op.records.add(part.type, {
992 991 'return': 1,
993 992 })
994 993 if bundlepart:
995 994 bundler.addpart(bundlepart)
996 995
997 996 # If commits were sent, store them
998 997 if cgparams:
999 998 buf = util.chunkbuffer(bundler.getchunks())
1000 999 fd, bundlefile = pycompat.mkstemp()
1001 1000 try:
1002 1001 try:
1003 1002 fp = os.fdopen(fd, r'wb')
1004 1003 fp.write(buf.read())
1005 1004 finally:
1006 1005 fp.close()
1007 1006 storebundle(op, cgparams, bundlefile)
1008 1007 finally:
1009 1008 try:
1010 1009 os.unlink(bundlefile)
1011 1010 except Exception:
1012 1011 # we would rather see the original exception
1013 1012 pass
1014 1013
1015 1014 def storebundle(op, params, bundlefile):
1016 1015 log = _getorcreateinfinitepushlogger(op)
1017 1016 parthandlerstart = time.time()
1018 1017 log(scratchbranchparttype, eventtype='start')
1019 1018 index = op.repo.bundlestore.index
1020 1019 store = op.repo.bundlestore.store
1021 1020 op.records.add(scratchbranchparttype + '_skippushkey', True)
1022 1021
1023 1022 bundle = None
1024 1023 try: # guards bundle
1025 1024 bundlepath = "bundle:%s+%s" % (op.repo.root, bundlefile)
1026 1025 bundle = hg.repository(op.repo.ui, bundlepath)
1027 1026
1028 1027 bookmark = params.get('bookmark')
1029 1028 bookprevnode = params.get('bookprevnode', '')
1030 1029 force = params.get('force')
1031 1030
1032 1031 if bookmark:
1033 1032 oldnode = index.getnode(bookmark)
1034 1033 else:
1035 1034 oldnode = None
1036 1035 bundleheads = bundle.revs('heads(bundle())')
1037 1036 if bookmark and len(bundleheads) > 1:
1038 1037 raise error.Abort(
1039 1038 _('cannot push more than one head to a scratch branch'))
1040 1039
1041 1040 revs = _getrevs(bundle, oldnode, force, bookmark)
1042 1041
1043 1042 # Notify the user of what is being pushed
1044 1043 plural = 's' if len(revs) > 1 else ''
1045 1044 op.repo.ui.warn(_("pushing %d commit%s:\n") % (len(revs), plural))
1046 1045 maxoutput = 10
1047 1046 for i in range(0, min(len(revs), maxoutput)):
1048 1047 firstline = bundle[revs[i]].description().split('\n')[0][:50]
1049 1048 op.repo.ui.warn((" %s %s\n") % (revs[i], firstline))
1050 1049
1051 1050 if len(revs) > maxoutput + 1:
1052 1051 op.repo.ui.warn((" ...\n"))
1053 1052 firstline = bundle[revs[-1]].description().split('\n')[0][:50]
1054 1053 op.repo.ui.warn((" %s %s\n") % (revs[-1], firstline))
1055 1054
1056 1055 nodesctx = [bundle[rev] for rev in revs]
1057 1056 inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
1058 1057 if bundleheads:
1059 1058 newheadscount = sum(not inindex(rev) for rev in bundleheads)
1060 1059 else:
1061 1060 newheadscount = 0
1062 1061 # If there's a bookmark specified, there should be only one head,
1063 1062 # so we choose the last node, which will be that head.
1064 1063 # If a bug or malicious client allows there to be a bookmark
1065 1064 # with multiple heads, we will place the bookmark on the last head.
1066 1065 bookmarknode = nodesctx[-1].hex() if nodesctx else None
1067 1066 key = None
1068 1067 if newheadscount:
1069 1068 with open(bundlefile, 'rb') as f:
1070 1069 bundledata = f.read()
1071 1070 with logservicecall(log, 'bundlestore',
1072 1071 bundlesize=len(bundledata)):
1073 1072 bundlesizelimit = 100 * 1024 * 1024 # 100 MB
1074 1073 if len(bundledata) > bundlesizelimit:
1075 1074 error_msg = ('bundle is too big: %d bytes. ' +
1076 1075 'max allowed size is 100 MB')
1077 1076 raise error.Abort(error_msg % (len(bundledata),))
1078 1077 key = store.write(bundledata)
1079 1078
1080 1079 with logservicecall(log, 'index', newheadscount=newheadscount), index:
1081 1080 if key:
1082 1081 index.addbundle(key, nodesctx)
1083 1082 if bookmark:
1084 1083 index.addbookmark(bookmark, bookmarknode)
1085 1084 _maybeaddpushbackpart(op, bookmark, bookmarknode,
1086 1085 bookprevnode, params)
1087 1086 log(scratchbranchparttype, eventtype='success',
1088 1087 elapsedms=(time.time() - parthandlerstart) * 1000)
1089 1088
1090 1089 except Exception as e:
1091 1090 log(scratchbranchparttype, eventtype='failure',
1092 1091 elapsedms=(time.time() - parthandlerstart) * 1000,
1093 1092 errormsg=str(e))
1094 1093 raise
1095 1094 finally:
1096 1095 if bundle:
1097 1096 bundle.close()
1098 1097
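Two details of storebundle above lend themselves to a self-contained illustration: the 100 MB cap on stored bundles and the choice of the node a scratch bookmark lands on. Only the 100 MB constant and the "last node wins" rule come from the code above; the helper names are invented for this sketch.

BUNDLE_SIZE_LIMIT = 100 * 1024 * 1024   # 100 MB, as enforced in storebundle

def check_bundle_size(bundledata):
    # mirrors the error.Abort raised before store.write()
    if len(bundledata) > BUNDLE_SIZE_LIMIT:
        raise ValueError('bundle is too big: %d bytes. '
                         'max allowed size is 100 MB' % len(bundledata))

def pick_bookmark_node(nodehexes):
    # with a bookmark there should be a single head, so the last pushed
    # node is that head; a buggy or malicious client still gets the last node
    return nodehexes[-1] if nodehexes else None

check_bundle_size(b'x' * 1024)                      # small bundle: fine
print(pick_bookmark_node(['aa' * 20, 'bb' * 20]))   # the second node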
1099 1098 @bundle2.parthandler(scratchbranchparttype,
1100 1099 ('bookmark', 'bookprevnode', 'force',
1101 1100 'pushbackbookmarks', 'cgversion'))
1102 1101 def bundle2scratchbranch(op, part):
1103 1102 '''unbundle a bundle2 part containing a changegroup to store'''
1104 1103
1105 1104 bundler = bundle2.bundle20(op.repo.ui)
1106 1105 cgversion = part.params.get('cgversion', '01')
1107 1106 cgpart = bundle2.bundlepart('changegroup', data=part.read())
1108 1107 cgpart.addparam('version', cgversion)
1109 1108 bundler.addpart(cgpart)
1110 1109 buf = util.chunkbuffer(bundler.getchunks())
1111 1110
1112 1111 fd, bundlefile = pycompat.mkstemp()
1113 1112 try:
1114 1113 try:
1115 1114 fp = os.fdopen(fd, r'wb')
1116 1115 fp.write(buf.read())
1117 1116 finally:
1118 1117 fp.close()
1119 1118 storebundle(op, part.params, bundlefile)
1120 1119 finally:
1121 1120 try:
1122 1121 os.unlink(bundlefile)
1123 1122 except OSError as e:
1124 1123 if e.errno != errno.ENOENT:
1125 1124 raise
1126 1125
1127 1126 return 1
1128 1127
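bundle2scratchbranch above follows a spool-to-tempfile pattern: write the rebuilt bundle to a temporary file, hand the path to storebundle, then always delete the file, tolerating the case where it is already gone. A minimal standard-library sketch of that pattern (spool and its consume callback are illustrative names):

import errno
import os
import tempfile

def spool(data, consume):
    # write data to a temp file, pass the path to consume(), always clean up
    fd, path = tempfile.mkstemp()
    try:
        try:
            fp = os.fdopen(fd, 'wb')
            fp.write(data)
        finally:
            fp.close()
        return consume(path)
    finally:
        try:
            os.unlink(path)
        except OSError as e:
            # the consumer may already have removed the file
            if e.errno != errno.ENOENT:
                raise

print(spool(b'payload', os.path.getsize))   # 7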
1129 1128 def _maybeaddpushbackpart(op, bookmark, newnode, oldnode, params):
1130 1129 if params.get('pushbackbookmarks'):
1131 1130 if op.reply and 'pushback' in op.reply.capabilities:
1132 1131 params = {
1133 1132 'namespace': 'bookmarks',
1134 1133 'key': bookmark,
1135 1134 'new': newnode,
1136 1135 'old': oldnode,
1137 1136 }
1138 1137 op.reply.newpart('pushkey', mandatoryparams=params.iteritems())
1139 1138
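The pushback is nothing more than a 'pushkey' reply part in the 'bookmarks' namespace. The small helper below (invented for illustration) spells out the mandatory parameters that _maybeaddpushbackpart builds, using plain data only:

def pushback_params(bookmark, newnode, oldnode):
    # same keys as the mandatoryparams assembled above
    return {
        'namespace': 'bookmarks',
        'key': bookmark,
        'new': newnode,
        'old': oldnode,
    }

print(sorted(pushback_params('scratch/feature', 'ab' * 20, '').items()))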
1140 1139 def bundle2pushkey(orig, op, part):
1141 1140 '''Wrapper of bundle2.handlepushkey()
1142 1141
1143 1142 The only goal is to skip calling the original function if the flag is set.
1144 1143 The flag is set when an infinitepush push is in progress.
1145 1144 '''
1146 1145 if op.records[scratchbranchparttype + '_skippushkey']:
1147 1146 if op.reply is not None:
1148 1147 rpart = op.reply.newpart('reply:pushkey')
1149 1148 rpart.addparam('in-reply-to', str(part.id), mandatory=False)
1150 1149 rpart.addparam('return', '1', mandatory=False)
1151 1150 return 1
1152 1151
1153 1152 return orig(op, part)
1154 1153
1155 1154 def bundle2handlephases(orig, op, part):
1156 1155 '''Wrapper of bundle2.handlephases()
1157 1156
1158 1157 The only goal is to skip calling the original function if the flag is set.
1159 1158 The flag is set when an infinitepush push is in progress.
1160 1159 '''
1161 1160
1162 1161 if op.records[scratchbranchparttype + '_skipphaseheads']:
1163 1162 return
1164 1163
1165 1164 return orig(op, part)
1166 1165
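bundle2pushkey and bundle2handlephases above share one convention: the wrapper receives the original handler as its first argument and short-circuits when a per-operation flag was recorded earlier in processparts. A generic standalone version of that pattern (the names are invented for the sketch; no Mercurial machinery is involved):

def wrap(orig, shouldskip, skippedresult=None):
    def wrapper(*args, **kwargs):
        if shouldskip():
            # pretend the work already happened, as the wrappers above do
            return skippedresult
        return orig(*args, **kwargs)
    return wrapper

calls = []
handler = wrap(calls.append, shouldskip=lambda: True, skippedresult=1)
print(handler('pushkey-part'))   # 1
print(calls)                     # [] -- the original handler never ran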
1167 1166 def _asyncsavemetadata(root, nodes):
1168 1167 '''starts a separate process that fills metadata for the nodes
1169 1168
1170 1169 This function creates a separate process and does not wait for its
1171 1170 completion. This was done to avoid slowing down pushes.
1172 1171 '''
1173 1172
1174 1173 maxnodes = 50
1175 1174 if len(nodes) > maxnodes:
1176 1175 return
1177 1176 nodesargs = []
1178 1177 for node in nodes:
1179 1178 nodesargs.append('--node')
1180 1179 nodesargs.append(node)
1181 1180 with open(os.devnull, 'w+b') as devnull:
1182 1181 cmdline = [util.hgexecutable(), 'debugfillinfinitepushmetadata',
1183 1182 '-R', root] + nodesargs
1184 1183 # Process will run in background. We don't care about the return code
1185 1184 subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmdline),
1186 1185 close_fds=True, shell=False,
1187 1186 stdin=devnull, stdout=devnull, stderr=devnull)
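The background metadata fill above is a plain fire-and-forget child process with all three standard streams pointed at the null device. The same pattern reduced to the standard library (the command is a harmless placeholder, not the real `hg debugfillinfinitepushmetadata` invocation):

import os
import subprocess
import sys

def run_in_background(cmdline):
    with open(os.devnull, 'w+b') as devnull:
        # no wait() and no return-code check: the push must not block on this
        subprocess.Popen(cmdline, close_fds=True, shell=False,
                         stdin=devnull, stdout=devnull, stderr=devnull)

run_in_background([sys.executable, '-c', 'pass'])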
@@ -1,48 +1,48 b''
1 1 # Copyright 2017 Facebook, Inc.
2 2 #
3 3 # This software may be used and distributed according to the terms of the
4 4 # GNU General Public License version 2 or any later version.
5 5
6 6 from __future__ import absolute_import
7 7
8 8 import os
9 9
10 10 from mercurial.node import hex
11 11
12 12 from mercurial import (
13 13 error,
14 14 extensions,
15 15 pycompat,
16 16 )
17 17
18 18 def isremotebooksenabled(ui):
19 19 return ('remotenames' in extensions._extensions and
20 20 ui.configbool('remotenames', 'bookmarks'))
21 21
22 22 def downloadbundle(repo, unknownbinhead):
23 23 index = repo.bundlestore.index
24 24 store = repo.bundlestore.store
25 25 bundleid = index.getbundle(hex(unknownbinhead))
26 26 if bundleid is None:
27 27 raise error.Abort('%s head is not known' % hex(unknownbinhead))
28 28 bundleraw = store.read(bundleid)
29 29 return _makebundlefromraw(bundleraw)
30 30
31 31 def _makebundlefromraw(data):
32 32 fp = None
33 33 fd, bundlefile = pycompat.mkstemp()
34 34 try: # guards bundlefile
35 35 try: # guards fp
36 fp = os.fdopen(fd, 'wb')
36 fp = os.fdopen(fd, r'wb')
37 37 fp.write(data)
38 38 finally:
39 39 fp.close()
40 40 except Exception:
41 41 try:
42 42 os.unlink(bundlefile)
43 43 except Exception:
44 44 # we would rather see the original exception
45 45 pass
46 46 raise
47 47
48 48 return bundlefile
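downloadbundle above resolves a head to a bundle key through the index, reads the raw bundle bytes back from the store, and materializes them as a temporary file. The toy index/store pair below mirrors that lookup flow in memory; InMemoryStore and InMemoryIndex are illustrative classes, not the extension's real index or store implementations.

import hashlib

class InMemoryStore(object):
    def __init__(self):
        self._blobs = {}
    def write(self, data):
        key = hashlib.sha1(data).hexdigest()
        self._blobs[key] = data
        return key
    def read(self, key):
        return self._blobs.get(key)

class InMemoryIndex(object):
    def __init__(self):
        self._heads = {}
    def addbundle(self, key, headhexes):
        for headhex in headhexes:
            self._heads[headhex] = key
    def getbundle(self, headhex):
        return self._heads.get(headhex)

store, index = InMemoryStore(), InMemoryIndex()
key = store.write(b'raw bundle bytes')
index.addbundle(key, ['ab' * 20])
assert store.read(index.getbundle('ab' * 20)) == b'raw bundle bytes'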
@@ -1,166 +1,167 b''
1 1 # This software may be used and distributed according to the terms of the
2 2 # GNU General Public License version 2 or any later version.
3 3
4 4 # based on bundleheads extension by Gregory Szorc <gps@mozilla.com>
5 5
6 6 from __future__ import absolute_import
7 7
8 8 import abc
9 9 import hashlib
10 10 import os
11 11 import subprocess
12 12 import tempfile
13 13
14 14 from mercurial import (
15 node,
15 16 pycompat,
16 17 )
17 18 from mercurial.utils import (
18 19 procutil,
19 20 )
20 21
21 22 NamedTemporaryFile = tempfile.NamedTemporaryFile
22 23
23 24 class BundleWriteException(Exception):
24 25 pass
25 26
26 27 class BundleReadException(Exception):
27 28 pass
28 29
29 30 class abstractbundlestore(object):
30 31 """Defines the interface for bundle stores.
31 32
32 33 A bundle store is an entity that stores raw bundle data. It is a simple
33 34 key-value store. However, the keys are chosen by the store. The keys can
34 35 be any Python object understood by the corresponding bundle index (see
35 36 ``abstractbundleindex`` below).
36 37 """
37 38 __metaclass__ = abc.ABCMeta
38 39
39 40 @abc.abstractmethod
40 41 def write(self, data):
41 42 """Write bundle data to the store.
42 43
43 44 This function receives the raw data to be written as a str.
44 45 Throws BundleWriteException on failure.
45 46 The key of the written data MUST be returned.
46 47 """
47 48
48 49 @abc.abstractmethod
49 50 def read(self, key):
50 51 """Obtain bundle data for a key.
51 52
52 53 Returns None if the bundle isn't known.
53 54 Throws BundleReadException on failure.
54 55 The returned object should be a file object supporting read()
55 56 and close().
56 57 """
57 58
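One Python 3 detail worth flagging for the class above: a class-level `__metaclass__ = abc.ABCMeta` assignment is only honoured by Python 2; Python 3 ignores the attribute, so the abstract methods are not actually enforced there. A portable way to keep the enforcement on both interpreters is to build the base class through the metaclass directly, as in this sketch (which is not what the extension currently does):

import abc

class portablebundlestore(abc.ABCMeta('_base', (object,), {})):
    # same write()/read() contract as abstractbundlestore, but enforced
    @abc.abstractmethod
    def write(self, data):
        """Write bundle data and return its key."""

    @abc.abstractmethod
    def read(self, key):
        """Return bundle data for key, or None if the bundle is unknown."""

try:
    portablebundlestore()
except TypeError as e:
    print(e)   # cannot instantiate: write() and read() are abstract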
58 59 class filebundlestore(object):
59 60 """bundle store in filesystem
60 61
61 62 meant for storing bundles somewhere on disk and on network filesystems
62 63 """
63 64 def __init__(self, ui, repo):
64 65 self.ui = ui
65 66 self.repo = repo
66 67 self.storepath = ui.configpath('scratchbranch', 'storepath')
67 68 if not self.storepath:
68 69 self.storepath = self.repo.vfs.join("scratchbranches",
69 70 "filebundlestore")
70 71 if not os.path.exists(self.storepath):
71 72 os.makedirs(self.storepath)
72 73
73 74 def _dirpath(self, hashvalue):
74 75 """First two bytes of the hash are the name of the upper
75 76 level directory, next two bytes are the name of the
76 77 next level directory"""
77 78 return os.path.join(self.storepath, hashvalue[0:2], hashvalue[2:4])
78 79
79 80 def _filepath(self, filename):
80 81 return os.path.join(self._dirpath(filename), filename)
81 82
82 83 def write(self, data):
83 filename = hashlib.sha1(data).hexdigest()
84 filename = node.hex(hashlib.sha1(data).digest())
84 85 dirpath = self._dirpath(filename)
85 86
86 87 if not os.path.exists(dirpath):
87 88 os.makedirs(dirpath)
88 89
89 90 with open(self._filepath(filename), 'wb') as f:
90 91 f.write(data)
91 92
92 93 return filename
93 94
94 95 def read(self, key):
95 96 try:
96 97 with open(self._filepath(key), 'rb') as f:
97 98 return f.read()
98 99 except IOError:
99 100 return None
100 101
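The functional change in this hunk (besides the new node import) is the key computation in write(): on Python 3, hashlib.sha1(data).hexdigest() returns a unicode str, while the store works with bytes throughout, so the digest is hex-encoded with node.hex() instead. The directory sharding is unchanged. A standalone sketch of both pieces, with binascii.hexlify standing in for node.hex and an illustrative store path:

import binascii
import hashlib
import os

def bundlekey(data):
    # node.hex() is essentially hexlify(): bytes in, hex bytes out on both
    # py2 and py3, unlike hexdigest(), which is text on py3
    return binascii.hexlify(hashlib.sha1(data).digest())

def shardedpath(storepath, key):
    key = key.decode('ascii')
    # first two characters -> top-level directory, next two -> second level
    return os.path.join(storepath, key[0:2], key[2:4], key)

key = bundlekey(b'raw bundle bytes')
print(shardedpath('/tmp/filebundlestore', key))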
101 102 class externalbundlestore(abstractbundlestore):
102 103 def __init__(self, put_binary, put_args, get_binary, get_args):
103 104 """
104 105 `put_binary` - path to binary file which uploads bundle to external
105 106 storage and prints key to stdout
106 107 `put_args` - format string with additional args to `put_binary`
107 108 {filename} replacement field can be used.
108 109 `get_binary` - path to binary file which accepts filename and key
109 110 (in that order), downloads bundle from store and saves it to file
110 111 `get_args` - format string with additional args to `get_binary`.
111 112 {filename} and {handle} replacement fields can be used.
112 113 """
113 114
114 115 self.put_args = put_args
115 116 self.get_args = get_args
116 117 self.put_binary = put_binary
117 118 self.get_binary = get_binary
118 119
119 120 def _call_binary(self, args):
120 121 p = subprocess.Popen(
121 122 pycompat.rapply(procutil.tonativestr, args),
122 123 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
123 124 close_fds=True)
124 125 stdout, stderr = p.communicate()
125 126 returncode = p.returncode
126 127 return returncode, stdout, stderr
127 128
128 129 def write(self, data):
129 130 # Won't work on Windows because the file can't be opened a second
130 131 # time without first closing it
131 132 # TODO: rewrite without str.format() and replace NamedTemporaryFile()
132 133 # with pycompat.namedtempfile()
133 134 with NamedTemporaryFile() as temp:
134 135 temp.write(data)
135 136 temp.flush()
136 137 temp.seek(0)
137 138 formatted_args = [arg.format(filename=temp.name)
138 139 for arg in self.put_args]
139 140 returncode, stdout, stderr = self._call_binary(
140 141 [self.put_binary] + formatted_args)
141 142
142 143 if returncode != 0:
143 144 raise BundleWriteException(
144 145 'Failed to upload to external store: %s' % stderr)
145 146 stdout_lines = stdout.splitlines()
146 147 if len(stdout_lines) == 1:
147 148 return stdout_lines[0]
148 149 else:
149 150 raise BundleWriteException(
150 151 'Bad output from %s: %s' % (self.put_binary, stdout))
151 152
152 153 def read(self, handle):
153 154 # Won't work on Windows because the file can't be opened a second
154 155 # time without first closing it
155 156 # TODO: rewrite without str.format() and replace NamedTemporaryFile()
156 157 # with pycompat.namedtempfile()
157 158 with NamedTemporaryFile() as temp:
158 159 formatted_args = [arg.format(filename=temp.name, handle=handle)
159 160 for arg in self.get_args]
160 161 returncode, stdout, stderr = self._call_binary(
161 162 [self.get_binary] + formatted_args)
162 163
163 164 if returncode != 0:
164 165 raise BundleReadException(
165 166 'Failed to download from external store: %s' % stderr)
166 167 return temp.read()
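The put/get helpers above are driven purely by format strings, so the whole contract with the external binaries is the {filename} and {handle} placeholders plus the key printed on stdout. A tiny example of how such argument templates expand (the binary names and templates here are made up):

put_args = ['--input', '{filename}']
get_args = ['--output', '{filename}', '--key', '{handle}']

upload_cmd = ['my-put-binary'] + [arg.format(filename='/tmp/bundle.hg')
                                  for arg in put_args]
download_cmd = ['my-get-binary'] + [arg.format(filename='/tmp/bundle.hg',
                                               handle='0a1b2c')
                                    for arg in get_args]
print(upload_cmd)     # ['my-put-binary', '--input', '/tmp/bundle.hg']
print(download_cmd)   # ['my-get-binary', '--output', '/tmp/bundle.hg', '--key', '0a1b2c']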