bundle2: properly request phases during getbundle...
Mike Hommey
r29064:9dc27a33 stable
@@ -1,1930 +1,1930 @@
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11
12 12 from .i18n import _
13 13 from .node import (
14 14 hex,
15 15 nullid,
16 16 )
17 17 from . import (
18 18 base85,
19 19 bookmarks as bookmod,
20 20 bundle2,
21 21 changegroup,
22 22 discovery,
23 23 error,
24 24 lock as lockmod,
25 25 obsolete,
26 26 phases,
27 27 pushkey,
28 28 scmutil,
29 29 sslutil,
30 30 streamclone,
31 31 tags,
32 32 url as urlmod,
33 33 util,
34 34 )
35 35
36 36 urlerr = util.urlerr
37 37 urlreq = util.urlreq
38 38
39 39 # Maps bundle compression human names to internal representation.
40 40 _bundlespeccompressions = {'none': None,
41 41 'bzip2': 'BZ',
42 42 'gzip': 'GZ',
43 43 }
44 44
45 45 # Maps bundle version human names to changegroup versions.
46 46 _bundlespeccgversions = {'v1': '01',
47 47 'v2': '02',
48 48 'packed1': 's1',
49 49 'bundle2': '02', #legacy
50 50 }
51 51
52 52 def parsebundlespec(repo, spec, strict=True, externalnames=False):
53 53 """Parse a bundle string specification into parts.
54 54
55 55 Bundle specifications denote a well-defined bundle/exchange format.
56 56 The content of a given specification should not change over time in
57 57 order to ensure that bundles produced by a newer version of Mercurial are
58 58 readable from an older version.
59 59
60 60 The string currently has the form:
61 61
62 62 <compression>-<type>[;<parameter0>[;<parameter1>]]
63 63
64 64 Where <compression> is one of the supported compression formats
65 65 and <type> is (currently) a version string. A ";" can follow the type and
66 66 all text afterwards is interpretted as URI encoded, ";" delimited key=value
67 67 pairs.
68 68
69 69 If ``strict`` is True (the default) <compression> is required. Otherwise,
70 70 it is optional.
71 71
72 72 If ``externalnames`` is False (the default), the human-centric names will
73 73 be converted to their internal representation.
74 74
75 75 Returns a 3-tuple of (compression, version, parameters). Compression will
76 76 be ``None`` if not in strict mode and a compression isn't defined.
77 77
78 78 An ``InvalidBundleSpecification`` is raised when the specification is
79 79 not syntactically well formed.
80 80
81 81 An ``UnsupportedBundleSpecification`` is raised when the compression or
82 82 bundle type/version is not recognized.
83 83
84 84 Note: this function will likely eventually return a more complex data
85 85 structure, including bundle2 part information.
86 86 """
87 87 def parseparams(s):
88 88 if ';' not in s:
89 89 return s, {}
90 90
91 91 params = {}
92 92 version, paramstr = s.split(';', 1)
93 93
94 94 for p in paramstr.split(';'):
95 95 if '=' not in p:
96 96 raise error.InvalidBundleSpecification(
97 97 _('invalid bundle specification: '
98 98 'missing "=" in parameter: %s') % p)
99 99
100 100 key, value = p.split('=', 1)
101 101 key = urlreq.unquote(key)
102 102 value = urlreq.unquote(value)
103 103 params[key] = value
104 104
105 105 return version, params
106 106
107 107
108 108 if strict and '-' not in spec:
109 109 raise error.InvalidBundleSpecification(
110 110 _('invalid bundle specification; '
111 111 'must be prefixed with compression: %s') % spec)
112 112
113 113 if '-' in spec:
114 114 compression, version = spec.split('-', 1)
115 115
116 116 if compression not in _bundlespeccompressions:
117 117 raise error.UnsupportedBundleSpecification(
118 118 _('%s compression is not supported') % compression)
119 119
120 120 version, params = parseparams(version)
121 121
122 122 if version not in _bundlespeccgversions:
123 123 raise error.UnsupportedBundleSpecification(
124 124 _('%s is not a recognized bundle version') % version)
125 125 else:
126 126 # Value could be just the compression or just the version, in which
127 127 # case some defaults are assumed (but only when not in strict mode).
128 128 assert not strict
129 129
130 130 spec, params = parseparams(spec)
131 131
132 132 if spec in _bundlespeccompressions:
133 133 compression = spec
134 134 version = 'v1'
135 135 if 'generaldelta' in repo.requirements:
136 136 version = 'v2'
137 137 elif spec in _bundlespeccgversions:
138 138 if spec == 'packed1':
139 139 compression = 'none'
140 140 else:
141 141 compression = 'bzip2'
142 142 version = spec
143 143 else:
144 144 raise error.UnsupportedBundleSpecification(
145 145 _('%s is not a recognized bundle specification') % spec)
146 146
147 147 # The specification for packed1 can optionally declare the data formats
148 148 # required to apply it. If we see this metadata, compare against what the
149 149 # repo supports and error if the bundle isn't compatible.
150 150 if version == 'packed1' and 'requirements' in params:
151 151 requirements = set(params['requirements'].split(','))
152 152 missingreqs = requirements - repo.supportedformats
153 153 if missingreqs:
154 154 raise error.UnsupportedBundleSpecification(
155 155 _('missing support for repository features: %s') %
156 156 ', '.join(sorted(missingreqs)))
157 157
158 158 if not externalnames:
159 159 compression = _bundlespeccompressions[compression]
160 160 version = _bundlespeccgversions[version]
161 161 return compression, version, params
162 162
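To make the mapping above concrete, here is a small sketch of what parsebundlespec returns for a few spec strings (inferred from the tables and branches shown in this hunk; the repo is assumed not to have the 'generaldelta' requirement):

    parsebundlespec(repo, 'gzip-v2')                      # ('GZ', '02', {})
    parsebundlespec(repo, 'bzip2', strict=False)          # ('BZ', '01', {})
    parsebundlespec(repo, 'gzip-v2', externalnames=True)  # ('gzip', 'v2', {})
    parsebundlespec(repo, 'none-packed1;requirements=revlogv1')
    # (None, 's1', {'requirements': 'revlogv1'})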
163 163 def readbundle(ui, fh, fname, vfs=None):
164 164 header = changegroup.readexactly(fh, 4)
165 165
166 166 alg = None
167 167 if not fname:
168 168 fname = "stream"
169 169 if not header.startswith('HG') and header.startswith('\0'):
170 170 fh = changegroup.headerlessfixup(fh, header)
171 171 header = "HG10"
172 172 alg = 'UN'
173 173 elif vfs:
174 174 fname = vfs.join(fname)
175 175
176 176 magic, version = header[0:2], header[2:4]
177 177
178 178 if magic != 'HG':
179 179 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
180 180 if version == '10':
181 181 if alg is None:
182 182 alg = changegroup.readexactly(fh, 2)
183 183 return changegroup.cg1unpacker(fh, alg)
184 184 elif version.startswith('2'):
185 185 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
186 186 elif version == 'S1':
187 187 return streamclone.streamcloneapplier(fh)
188 188 else:
189 189 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
190 190
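For reference, the header prefixes dispatched above (a summary of the branches in readbundle, not exhaustive):

    # 'HG10' + 'UN'/'BZ'/'GZ'  -> changegroup.cg1unpacker (bundle1)
    # 'HG2*'                   -> bundle2.getunbundler (bundle2)
    # 'HGS1'                   -> streamclone.streamcloneapplier (packed/stream bundle)
    # leading '\0'             -> headerless bundle1, fixed up and read as 'HG10UN'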
191 191 def getbundlespec(ui, fh):
192 192 """Infer the bundlespec from a bundle file handle.
193 193
194 194 The input file handle is seeked and the original seek position is not
195 195 restored.
196 196 """
197 197 def speccompression(alg):
198 198 for k, v in _bundlespeccompressions.items():
199 199 if v == alg:
200 200 return k
201 201 return None
202 202
203 203 b = readbundle(ui, fh, None)
204 204 if isinstance(b, changegroup.cg1unpacker):
205 205 alg = b._type
206 206 if alg == '_truncatedBZ':
207 207 alg = 'BZ'
208 208 comp = speccompression(alg)
209 209 if not comp:
210 210 raise error.Abort(_('unknown compression algorithm: %s') % alg)
211 211 return '%s-v1' % comp
212 212 elif isinstance(b, bundle2.unbundle20):
213 213 if 'Compression' in b.params:
214 214 comp = speccompression(b.params['Compression'])
215 215 if not comp:
216 216 raise error.Abort(_('unknown compression algorithm: %s') % comp)
217 217 else:
218 218 comp = 'none'
219 219
220 220 version = None
221 221 for part in b.iterparts():
222 222 if part.type == 'changegroup':
223 223 version = part.params['version']
224 224 if version in ('01', '02'):
225 225 version = 'v2'
226 226 else:
227 227 raise error.Abort(_('changegroup version %s does not have '
228 228 'a known bundlespec') % version,
229 229 hint=_('try upgrading your Mercurial '
230 230 'client'))
231 231
232 232 if not version:
233 233 raise error.Abort(_('could not identify changegroup version in '
234 234 'bundle'))
235 235
236 236 return '%s-%s' % (comp, version)
237 237 elif isinstance(b, streamclone.streamcloneapplier):
238 238 requirements = streamclone.readbundle1header(fh)[2]
239 239 params = 'requirements=%s' % ','.join(sorted(requirements))
240 240 return 'none-packed1;%s' % urlreq.quote(params)
241 241 else:
242 242 raise error.Abort(_('unknown bundle type: %s') % b)
243 243
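A minimal usage sketch (the file name and ui object are placeholders; the expected output assumes a gzip-compressed bundle2 file, e.g. one built with something like `hg bundle --type gzip-v2`):

    with open('example.hg', 'rb') as fh:
        spec = getbundlespec(ui, fh)   # e.g. 'gzip-v2'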
244 244 def buildobsmarkerspart(bundler, markers):
245 245 """add an obsmarker part to the bundler with <markers>
246 246
247 247 No part is created if markers is empty.
248 248 Raises ValueError if the bundler doesn't support any known obsmarker format.
249 249 """
250 250 if markers:
251 251 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
252 252 version = obsolete.commonversion(remoteversions)
253 253 if version is None:
254 254 raise ValueError('bundler does not support common obsmarker format')
255 255 stream = obsolete.encodemarkers(markers, True, version=version)
256 256 return bundler.newpart('obsmarkers', data=stream)
257 257 return None
258 258
259 259 def _canusebundle2(op):
260 260 """return true if a pull/push can use bundle2
261 261
262 262 Feel free to nuke this function when we drop the experimental option"""
263 263 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
264 264 and op.remote.capable('bundle2'))
265 265
266 266
267 267 class pushoperation(object):
268 268 """A object that represent a single push operation
269 269
270 270 Its purpose is to carry push related state and very common operations.
271 271
272 272 A new pushoperation should be created at the beginning of each push and
273 273 discarded afterward.
274 274 """
275 275
276 276 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
277 277 bookmarks=()):
278 278 # repo we push from
279 279 self.repo = repo
280 280 self.ui = repo.ui
281 281 # repo we push to
282 282 self.remote = remote
283 283 # force option provided
284 284 self.force = force
285 285 # revs to be pushed (None is "all")
286 286 self.revs = revs
287 287 # bookmark explicitly pushed
288 288 self.bookmarks = bookmarks
289 289 # allow push of new branch
290 290 self.newbranch = newbranch
291 291 # did a local lock get acquired?
292 292 self.locallocked = None
293 293 # step already performed
294 294 # (used to check what steps have been already performed through bundle2)
295 295 self.stepsdone = set()
296 296 # Integer version of the changegroup push result
297 297 # - None means nothing to push
298 298 # - 0 means HTTP error
299 299 # - 1 means we pushed and remote head count is unchanged *or*
300 300 # we have outgoing changesets but refused to push
301 301 # - other values as described by addchangegroup()
302 302 self.cgresult = None
303 303 # Boolean value for the bookmark push
304 304 self.bkresult = None
305 305 # discover.outgoing object (contains common and outgoing data)
306 306 self.outgoing = None
307 307 # all remote heads before the push
308 308 self.remoteheads = None
309 309 # testable as a boolean indicating if any nodes are missing locally.
310 310 self.incoming = None
311 311 # phases changes that must be pushed along side the changesets
312 312 self.outdatedphases = None
313 313 # phases changes that must be pushed if changeset push fails
314 314 self.fallbackoutdatedphases = None
315 315 # outgoing obsmarkers
316 316 self.outobsmarkers = set()
317 317 # outgoing bookmarks
318 318 self.outbookmarks = []
319 319 # transaction manager
320 320 self.trmanager = None
321 321 # map { pushkey partid -> callback handling failure}
322 322 # used to handle exception from mandatory pushkey part failure
323 323 self.pkfailcb = {}
324 324
325 325 @util.propertycache
326 326 def futureheads(self):
327 327 """future remote heads if the changeset push succeeds"""
328 328 return self.outgoing.missingheads
329 329
330 330 @util.propertycache
331 331 def fallbackheads(self):
332 332 """future remote heads if the changeset push fails"""
333 333 if self.revs is None:
334 334 # no target to push, all common heads are relevant
335 335 return self.outgoing.commonheads
336 336 unfi = self.repo.unfiltered()
337 337 # I want cheads = heads(::missingheads and ::commonheads)
338 338 # (missingheads is revs with secret changeset filtered out)
339 339 #
340 340 # This can be expressed as:
341 341 # cheads = ( (missingheads and ::commonheads)
342 342 # + (commonheads and ::missingheads))
343 343 # )
344 344 #
345 345 # while trying to push we already computed the following:
346 346 # common = (::commonheads)
347 347 # missing = ((commonheads::missingheads) - commonheads)
348 348 #
349 349 # We can pick:
350 350 # * missingheads part of common (::commonheads)
351 351 common = self.outgoing.common
352 352 nm = self.repo.changelog.nodemap
353 353 cheads = [node for node in self.revs if nm[node] in common]
354 354 # and
355 355 # * commonheads parents on missing
356 356 revset = unfi.set('%ln and parents(roots(%ln))',
357 357 self.outgoing.commonheads,
358 358 self.outgoing.missing)
359 359 cheads.extend(c.node() for c in revset)
360 360 return cheads
361 361
362 362 @property
363 363 def commonheads(self):
364 364 """set of all common heads after changeset bundle push"""
365 365 if self.cgresult:
366 366 return self.futureheads
367 367 else:
368 368 return self.fallbackheads
369 369
370 370 # mapping of message used when pushing bookmark
371 371 bookmsgmap = {'update': (_("updating bookmark %s\n"),
372 372 _('updating bookmark %s failed!\n')),
373 373 'export': (_("exporting bookmark %s\n"),
374 374 _('exporting bookmark %s failed!\n')),
375 375 'delete': (_("deleting remote bookmark %s\n"),
376 376 _('deleting remote bookmark %s failed!\n')),
377 377 }
378 378
379 379
380 380 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
381 381 opargs=None):
382 382 '''Push outgoing changesets (limited by revs) from a local
383 383 repository to remote. Return the pushoperation, whose cgresult is an integer:
384 384 - None means nothing to push
385 385 - 0 means HTTP error
386 386 - 1 means we pushed and remote head count is unchanged *or*
387 387 we have outgoing changesets but refused to push
388 388 - other values as described by addchangegroup()
389 389 '''
390 390 if opargs is None:
391 391 opargs = {}
392 392 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
393 393 **opargs)
394 394 if pushop.remote.local():
395 395 missing = (set(pushop.repo.requirements)
396 396 - pushop.remote.local().supported)
397 397 if missing:
398 398 msg = _("required features are not"
399 399 " supported in the destination:"
400 400 " %s") % (', '.join(sorted(missing)))
401 401 raise error.Abort(msg)
402 402
403 403 # there are two ways to push to remote repo:
404 404 #
405 405 # addchangegroup assumes local user can lock remote
406 406 # repo (local filesystem, old ssh servers).
407 407 #
408 408 # unbundle assumes local user cannot lock remote repo (new ssh
409 409 # servers, http servers).
410 410
411 411 if not pushop.remote.canpush():
412 412 raise error.Abort(_("destination does not support push"))
413 413 # get local lock as we might write phase data
414 414 localwlock = locallock = None
415 415 try:
416 416 # bundle2 push may receive a reply bundle touching bookmarks or other
417 417 # things requiring the wlock. Take it now to ensure proper ordering.
418 418 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
419 419 if _canusebundle2(pushop) and maypushback:
420 420 localwlock = pushop.repo.wlock()
421 421 locallock = pushop.repo.lock()
422 422 pushop.locallocked = True
423 423 except IOError as err:
424 424 pushop.locallocked = False
425 425 if err.errno != errno.EACCES:
426 426 raise
427 427 # source repo cannot be locked.
428 428 # We do not abort the push, but just disable the local phase
429 429 # synchronisation.
430 430 msg = 'cannot lock source repository: %s\n' % err
431 431 pushop.ui.debug(msg)
432 432 try:
433 433 if pushop.locallocked:
434 434 pushop.trmanager = transactionmanager(pushop.repo,
435 435 'push-response',
436 436 pushop.remote.url())
437 437 pushop.repo.checkpush(pushop)
438 438 lock = None
439 439 unbundle = pushop.remote.capable('unbundle')
440 440 if not unbundle:
441 441 lock = pushop.remote.lock()
442 442 try:
443 443 _pushdiscovery(pushop)
444 444 if _canusebundle2(pushop):
445 445 _pushbundle2(pushop)
446 446 _pushchangeset(pushop)
447 447 _pushsyncphase(pushop)
448 448 _pushobsolete(pushop)
449 449 _pushbookmark(pushop)
450 450 finally:
451 451 if lock is not None:
452 452 lock.release()
453 453 if pushop.trmanager:
454 454 pushop.trmanager.close()
455 455 finally:
456 456 if pushop.trmanager:
457 457 pushop.trmanager.release()
458 458 if locallock is not None:
459 459 locallock.release()
460 460 if localwlock is not None:
461 461 localwlock.release()
462 462
463 463 return pushop
464 464
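A hedged usage sketch of the entry point above; the peer creation through hg.peer and the destination URL are illustrative assumptions, not taken from this file:

    from mercurial import exchange, hg

    other = hg.peer(repo, {}, 'ssh://hg@example.com/repo')   # assumed destination
    pushop = exchange.push(repo, other, newbranch=True)
    if pushop.cgresult == 0:
        repo.ui.warn('push failed\n')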
465 465 # list of steps to perform discovery before push
466 466 pushdiscoveryorder = []
467 467
468 468 # Mapping between step name and function
469 469 #
470 470 # This exists to help extensions wrap steps if necessary
471 471 pushdiscoverymapping = {}
472 472
473 473 def pushdiscovery(stepname):
474 474 """decorator for function performing discovery before push
475 475
476 476 The function is added to the step -> function mapping and appended to the
477 477 list of steps. Beware that decorated functions will be added in order (this
478 478 may matter).
479 479
480 480 You can only use this decorator for a new step, if you want to wrap a step
481 481 from an extension, change the pushdiscovery dictionary directly."""
482 482 def dec(func):
483 483 assert stepname not in pushdiscoverymapping
484 484 pushdiscoverymapping[stepname] = func
485 485 pushdiscoveryorder.append(stepname)
486 486 return func
487 487 return dec
488 488
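As an illustration of the decorator described above, an extension could register an extra discovery step like this (step name and body are invented for the example):

    @pushdiscovery('mystep')
    def _pushdiscoverymystep(pushop):
        # stash whatever extra state the extension needs on the push operation
        pushop.ui.debug('running mystep discovery\n')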
489 489 def _pushdiscovery(pushop):
490 490 """Run all discovery steps"""
491 491 for stepname in pushdiscoveryorder:
492 492 step = pushdiscoverymapping[stepname]
493 493 step(pushop)
494 494
495 495 @pushdiscovery('changeset')
496 496 def _pushdiscoverychangeset(pushop):
497 497 """discover the changeset that need to be pushed"""
498 498 fci = discovery.findcommonincoming
499 499 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
500 500 common, inc, remoteheads = commoninc
501 501 fco = discovery.findcommonoutgoing
502 502 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
503 503 commoninc=commoninc, force=pushop.force)
504 504 pushop.outgoing = outgoing
505 505 pushop.remoteheads = remoteheads
506 506 pushop.incoming = inc
507 507
508 508 @pushdiscovery('phase')
509 509 def _pushdiscoveryphase(pushop):
510 510 """discover the phase that needs to be pushed
511 511
512 512 (computed for both success and failure case for changesets push)"""
513 513 outgoing = pushop.outgoing
514 514 unfi = pushop.repo.unfiltered()
515 515 remotephases = pushop.remote.listkeys('phases')
516 516 publishing = remotephases.get('publishing', False)
517 517 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
518 518 and remotephases # server supports phases
519 519 and not pushop.outgoing.missing # no changesets to be pushed
520 520 and publishing):
521 521 # When:
522 522 # - this is a subrepo push
523 523 # - and remote supports phases
524 524 # - and no changesets are to be pushed
525 525 # - and remote is publishing
526 526 # We may be in issue 3871 case!
527 527 # We drop the possible phase synchronisation done by
528 528 # courtesy to publish changesets possibly locally draft
529 529 # on the remote.
530 530 remotephases = {'publishing': 'True'}
531 531 ana = phases.analyzeremotephases(pushop.repo,
532 532 pushop.fallbackheads,
533 533 remotephases)
534 534 pheads, droots = ana
535 535 extracond = ''
536 536 if not publishing:
537 537 extracond = ' and public()'
538 538 revset = 'heads((%%ln::%%ln) %s)' % extracond
539 539 # Get the list of all revs draft on remote but public here.
540 540 # XXX Beware that the revset breaks if droots is not strictly
541 541 # XXX roots; we may want to ensure it is, but that is costly
542 542 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
543 543 if not outgoing.missing:
544 544 future = fallback
545 545 else:
546 546 # adds changeset we are going to push as draft
547 547 #
548 548 # should not be necessary for publishing server, but because of an
549 549 # issue fixed in xxxxx we have to do it anyway.
550 550 fdroots = list(unfi.set('roots(%ln + %ln::)',
551 551 outgoing.missing, droots))
552 552 fdroots = [f.node() for f in fdroots]
553 553 future = list(unfi.set(revset, fdroots, pushop.futureheads))
554 554 pushop.outdatedphases = future
555 555 pushop.fallbackoutdatedphases = fallback
556 556
557 557 @pushdiscovery('obsmarker')
558 558 def _pushdiscoveryobsmarkers(pushop):
559 559 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
560 560 and pushop.repo.obsstore
561 561 and 'obsolete' in pushop.remote.listkeys('namespaces')):
562 562 repo = pushop.repo
563 563 # very naive computation, that can be quite expensive on big repo.
564 564 # However: evolution is currently slow on them anyway.
565 565 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
566 566 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
567 567
568 568 @pushdiscovery('bookmarks')
569 569 def _pushdiscoverybookmarks(pushop):
570 570 ui = pushop.ui
571 571 repo = pushop.repo.unfiltered()
572 572 remote = pushop.remote
573 573 ui.debug("checking for updated bookmarks\n")
574 574 ancestors = ()
575 575 if pushop.revs:
576 576 revnums = map(repo.changelog.rev, pushop.revs)
577 577 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
578 578 remotebookmark = remote.listkeys('bookmarks')
579 579
580 580 explicit = set([repo._bookmarks.expandname(bookmark)
581 581 for bookmark in pushop.bookmarks])
582 582
583 583 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
584 584 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
585 585 for b, scid, dcid in advsrc:
586 586 if b in explicit:
587 587 explicit.remove(b)
588 588 if not ancestors or repo[scid].rev() in ancestors:
589 589 pushop.outbookmarks.append((b, dcid, scid))
590 590 # search added bookmark
591 591 for b, scid, dcid in addsrc:
592 592 if b in explicit:
593 593 explicit.remove(b)
594 594 pushop.outbookmarks.append((b, '', scid))
595 595 # search for overwritten bookmark
596 596 for b, scid, dcid in advdst + diverge + differ:
597 597 if b in explicit:
598 598 explicit.remove(b)
599 599 pushop.outbookmarks.append((b, dcid, scid))
600 600 # search for bookmark to delete
601 601 for b, scid, dcid in adddst:
602 602 if b in explicit:
603 603 explicit.remove(b)
604 604 # treat as "deleted locally"
605 605 pushop.outbookmarks.append((b, dcid, ''))
606 606 # identical bookmarks shouldn't get reported
607 607 for b, scid, dcid in same:
608 608 if b in explicit:
609 609 explicit.remove(b)
610 610
611 611 if explicit:
612 612 explicit = sorted(explicit)
613 613 # we should probably list all of them
614 614 ui.warn(_('bookmark %s does not exist on the local '
615 615 'or remote repository!\n') % explicit[0])
616 616 pushop.bkresult = 2
617 617
618 618 pushop.outbookmarks.sort()
619 619
620 620 def _pushcheckoutgoing(pushop):
621 621 outgoing = pushop.outgoing
622 622 unfi = pushop.repo.unfiltered()
623 623 if not outgoing.missing:
624 624 # nothing to push
625 625 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
626 626 return False
627 627 # something to push
628 628 if not pushop.force:
629 629 # if repo.obsstore == False --> no obsolete
630 630 # then, save the iteration
631 631 if unfi.obsstore:
632 632 # these messages are here for 80 char limit reasons
633 633 mso = _("push includes obsolete changeset: %s!")
634 634 mst = {"unstable": _("push includes unstable changeset: %s!"),
635 635 "bumped": _("push includes bumped changeset: %s!"),
636 636 "divergent": _("push includes divergent changeset: %s!")}
637 637 # If we are to push and there is at least one
638 638 # obsolete or unstable changeset in missing, at
639 639 # least one of the missing heads will be obsolete or
640 640 # unstable. So checking heads only is ok
641 641 for node in outgoing.missingheads:
642 642 ctx = unfi[node]
643 643 if ctx.obsolete():
644 644 raise error.Abort(mso % ctx)
645 645 elif ctx.troubled():
646 646 raise error.Abort(mst[ctx.troubles()[0]] % ctx)
647 647
648 648 discovery.checkheads(pushop)
649 649 return True
650 650
651 651 # List of names of steps to perform for an outgoing bundle2, order matters.
652 652 b2partsgenorder = []
653 653
654 654 # Mapping between step name and function
655 655 #
656 656 # This exists to help extensions wrap steps if necessary
657 657 b2partsgenmapping = {}
658 658
659 659 def b2partsgenerator(stepname, idx=None):
660 660 """decorator for function generating bundle2 part
661 661
662 662 The function is added to the step -> function mapping and appended to the
663 663 list of steps. Beware that decorated functions will be added in order
664 664 (this may matter).
665 665
666 666 You can only use this decorator for new steps, if you want to wrap a step
667 667 from an extension, change the b2partsgenmapping dictionary directly."""
668 668 def dec(func):
669 669 assert stepname not in b2partsgenmapping
670 670 b2partsgenmapping[stepname] = func
671 671 if idx is None:
672 672 b2partsgenorder.append(stepname)
673 673 else:
674 674 b2partsgenorder.insert(idx, stepname)
675 675 return func
676 676 return dec
677 677
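For comparison with the built-in generators that follow, a sketch of how an extension could register its own part generator (part and step names are invented); returning a callable would register it as a reply handler, as _pushbundle2 shows below:

    @b2partsgenerator('x-myextension')
    def _pushb2myextension(pushop, bundler):
        if 'x-myextension' in pushop.stepsdone:
            return
        pushop.stepsdone.add('x-myextension')
        bundler.newpart('x-myextension-part', data='payload')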
678 678 def _pushb2ctxcheckheads(pushop, bundler):
679 679 """Generate race condition checking parts
680 680
681 681 Exists as an independent function to aid extensions
682 682 """
683 683 if not pushop.force:
684 684 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
685 685
686 686 @b2partsgenerator('changeset')
687 687 def _pushb2ctx(pushop, bundler):
688 688 """handle changegroup push through bundle2
689 689
690 690 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
691 691 """
692 692 if 'changesets' in pushop.stepsdone:
693 693 return
694 694 pushop.stepsdone.add('changesets')
695 695 # Send known heads to the server for race detection.
696 696 if not _pushcheckoutgoing(pushop):
697 697 return
698 698 pushop.repo.prepushoutgoinghooks(pushop)
699 699
700 700 _pushb2ctxcheckheads(pushop, bundler)
701 701
702 702 b2caps = bundle2.bundle2caps(pushop.remote)
703 703 version = '01'
704 704 cgversions = b2caps.get('changegroup')
705 705 if cgversions: # 3.1 and 3.2 ship with an empty value
706 706 cgversions = [v for v in cgversions
707 707 if v in changegroup.supportedoutgoingversions(
708 708 pushop.repo)]
709 709 if not cgversions:
710 710 raise ValueError(_('no common changegroup version'))
711 711 version = max(cgversions)
712 712 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
713 713 pushop.outgoing,
714 714 version=version)
715 715 cgpart = bundler.newpart('changegroup', data=cg)
716 716 if cgversions:
717 717 cgpart.addparam('version', version)
718 718 if 'treemanifest' in pushop.repo.requirements:
719 719 cgpart.addparam('treemanifest', '1')
720 720 def handlereply(op):
721 721 """extract addchangegroup returns from server reply"""
722 722 cgreplies = op.records.getreplies(cgpart.id)
723 723 assert len(cgreplies['changegroup']) == 1
724 724 pushop.cgresult = cgreplies['changegroup'][0]['return']
725 725 return handlereply
726 726
727 727 @b2partsgenerator('phase')
728 728 def _pushb2phases(pushop, bundler):
729 729 """handle phase push through bundle2"""
730 730 if 'phases' in pushop.stepsdone:
731 731 return
732 732 b2caps = bundle2.bundle2caps(pushop.remote)
733 733 if not 'pushkey' in b2caps:
734 734 return
735 735 pushop.stepsdone.add('phases')
736 736 part2node = []
737 737
738 738 def handlefailure(pushop, exc):
739 739 targetid = int(exc.partid)
740 740 for partid, node in part2node:
741 741 if partid == targetid:
742 742 raise error.Abort(_('updating %s to public failed') % node)
743 743
744 744 enc = pushkey.encode
745 745 for newremotehead in pushop.outdatedphases:
746 746 part = bundler.newpart('pushkey')
747 747 part.addparam('namespace', enc('phases'))
748 748 part.addparam('key', enc(newremotehead.hex()))
749 749 part.addparam('old', enc(str(phases.draft)))
750 750 part.addparam('new', enc(str(phases.public)))
751 751 part2node.append((part.id, newremotehead))
752 752 pushop.pkfailcb[part.id] = handlefailure
753 753
754 754 def handlereply(op):
755 755 for partid, node in part2node:
756 756 partrep = op.records.getreplies(partid)
757 757 results = partrep['pushkey']
758 758 assert len(results) <= 1
759 759 msg = None
760 760 if not results:
761 761 msg = _('server ignored update of %s to public!\n') % node
762 762 elif not int(results[0]['return']):
763 763 msg = _('updating %s to public failed!\n') % node
764 764 if msg is not None:
765 765 pushop.ui.warn(msg)
766 766 return handlereply
767 767
768 768 @b2partsgenerator('obsmarkers')
769 769 def _pushb2obsmarkers(pushop, bundler):
770 770 if 'obsmarkers' in pushop.stepsdone:
771 771 return
772 772 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
773 773 if obsolete.commonversion(remoteversions) is None:
774 774 return
775 775 pushop.stepsdone.add('obsmarkers')
776 776 if pushop.outobsmarkers:
777 777 markers = sorted(pushop.outobsmarkers)
778 778 buildobsmarkerspart(bundler, markers)
779 779
780 780 @b2partsgenerator('bookmarks')
781 781 def _pushb2bookmarks(pushop, bundler):
782 782 """handle bookmark push through bundle2"""
783 783 if 'bookmarks' in pushop.stepsdone:
784 784 return
785 785 b2caps = bundle2.bundle2caps(pushop.remote)
786 786 if 'pushkey' not in b2caps:
787 787 return
788 788 pushop.stepsdone.add('bookmarks')
789 789 part2book = []
790 790 enc = pushkey.encode
791 791
792 792 def handlefailure(pushop, exc):
793 793 targetid = int(exc.partid)
794 794 for partid, book, action in part2book:
795 795 if partid == targetid:
796 796 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
797 797 # we should not be called for parts we did not generate
798 798 assert False
799 799
800 800 for book, old, new in pushop.outbookmarks:
801 801 part = bundler.newpart('pushkey')
802 802 part.addparam('namespace', enc('bookmarks'))
803 803 part.addparam('key', enc(book))
804 804 part.addparam('old', enc(old))
805 805 part.addparam('new', enc(new))
806 806 action = 'update'
807 807 if not old:
808 808 action = 'export'
809 809 elif not new:
810 810 action = 'delete'
811 811 part2book.append((part.id, book, action))
812 812 pushop.pkfailcb[part.id] = handlefailure
813 813
814 814 def handlereply(op):
815 815 ui = pushop.ui
816 816 for partid, book, action in part2book:
817 817 partrep = op.records.getreplies(partid)
818 818 results = partrep['pushkey']
819 819 assert len(results) <= 1
820 820 if not results:
821 821 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
822 822 else:
823 823 ret = int(results[0]['return'])
824 824 if ret:
825 825 ui.status(bookmsgmap[action][0] % book)
826 826 else:
827 827 ui.warn(bookmsgmap[action][1] % book)
828 828 if pushop.bkresult is not None:
829 829 pushop.bkresult = 1
830 830 return handlereply
831 831
832 832
833 833 def _pushbundle2(pushop):
834 834 """push data to the remote using bundle2
835 835
836 836 The only currently supported type of data is changegroup but this will
837 837 evolve in the future."""
838 838 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
839 839 pushback = (pushop.trmanager
840 840 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
841 841
842 842 # create reply capability
843 843 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
844 844 allowpushback=pushback))
845 845 bundler.newpart('replycaps', data=capsblob)
846 846 replyhandlers = []
847 847 for partgenname in b2partsgenorder:
848 848 partgen = b2partsgenmapping[partgenname]
849 849 ret = partgen(pushop, bundler)
850 850 if callable(ret):
851 851 replyhandlers.append(ret)
852 852 # do not push if nothing to push
853 853 if bundler.nbparts <= 1:
854 854 return
855 855 stream = util.chunkbuffer(bundler.getchunks())
856 856 try:
857 857 try:
858 858 reply = pushop.remote.unbundle(stream, ['force'], 'push')
859 859 except error.BundleValueError as exc:
860 860 raise error.Abort('missing support for %s' % exc)
861 861 try:
862 862 trgetter = None
863 863 if pushback:
864 864 trgetter = pushop.trmanager.transaction
865 865 op = bundle2.processbundle(pushop.repo, reply, trgetter)
866 866 except error.BundleValueError as exc:
867 867 raise error.Abort('missing support for %s' % exc)
868 868 except bundle2.AbortFromPart as exc:
869 869 pushop.ui.status(_('remote: %s\n') % exc)
870 870 raise error.Abort(_('push failed on remote'), hint=exc.hint)
871 871 except error.PushkeyFailed as exc:
872 872 partid = int(exc.partid)
873 873 if partid not in pushop.pkfailcb:
874 874 raise
875 875 pushop.pkfailcb[partid](pushop, exc)
876 876 for rephand in replyhandlers:
877 877 rephand(op)
878 878
879 879 def _pushchangeset(pushop):
880 880 """Make the actual push of changeset bundle to remote repo"""
881 881 if 'changesets' in pushop.stepsdone:
882 882 return
883 883 pushop.stepsdone.add('changesets')
884 884 if not _pushcheckoutgoing(pushop):
885 885 return
886 886 pushop.repo.prepushoutgoinghooks(pushop)
887 887 outgoing = pushop.outgoing
888 888 unbundle = pushop.remote.capable('unbundle')
889 889 # TODO: get bundlecaps from remote
890 890 bundlecaps = None
891 891 # create a changegroup from local
892 892 if pushop.revs is None and not (outgoing.excluded
893 893 or pushop.repo.changelog.filteredrevs):
894 894 # push everything,
895 895 # use the fast path, no race possible on push
896 896 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
897 897 cg = changegroup.getsubset(pushop.repo,
898 898 outgoing,
899 899 bundler,
900 900 'push',
901 901 fastpath=True)
902 902 else:
903 903 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
904 904 bundlecaps)
905 905
906 906 # apply changegroup to remote
907 907 if unbundle:
908 908 # local repo finds heads on server, finds out what
909 909 # revs it must push. once revs transferred, if server
910 910 # finds it has different heads (someone else won
911 911 # commit/push race), server aborts.
912 912 if pushop.force:
913 913 remoteheads = ['force']
914 914 else:
915 915 remoteheads = pushop.remoteheads
916 916 # ssh: return remote's addchangegroup()
917 917 # http: return remote's addchangegroup() or 0 for error
918 918 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
919 919 pushop.repo.url())
920 920 else:
921 921 # we return an integer indicating remote head count
922 922 # change
923 923 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
924 924 pushop.repo.url())
925 925
926 926 def _pushsyncphase(pushop):
927 927 """synchronise phase information locally and remotely"""
928 928 cheads = pushop.commonheads
929 929 # even when we don't push, exchanging phase data is useful
930 930 remotephases = pushop.remote.listkeys('phases')
931 931 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
932 932 and remotephases # server supports phases
933 933 and pushop.cgresult is None # nothing was pushed
934 934 and remotephases.get('publishing', False)):
935 935 # When:
936 936 # - this is a subrepo push
937 937 # - and remote supports phases
938 938 # - and no changeset was pushed
939 939 # - and remote is publishing
940 940 # We may be in issue 3871 case!
941 941 # We drop the possible phase synchronisation done by
942 942 # courtesy to publish changesets possibly locally draft
943 943 # on the remote.
944 944 remotephases = {'publishing': 'True'}
945 945 if not remotephases: # old server or public only reply from non-publishing
946 946 _localphasemove(pushop, cheads)
947 947 # don't push any phase data as there is nothing to push
948 948 else:
949 949 ana = phases.analyzeremotephases(pushop.repo, cheads,
950 950 remotephases)
951 951 pheads, droots = ana
952 952 ### Apply remote phase on local
953 953 if remotephases.get('publishing', False):
954 954 _localphasemove(pushop, cheads)
955 955 else: # publish = False
956 956 _localphasemove(pushop, pheads)
957 957 _localphasemove(pushop, cheads, phases.draft)
958 958 ### Apply local phase on remote
959 959
960 960 if pushop.cgresult:
961 961 if 'phases' in pushop.stepsdone:
962 962 # phases already pushed through bundle2
963 963 return
964 964 outdated = pushop.outdatedphases
965 965 else:
966 966 outdated = pushop.fallbackoutdatedphases
967 967
968 968 pushop.stepsdone.add('phases')
969 969
970 970 # filter heads already turned public by the push
971 971 outdated = [c for c in outdated if c.node() not in pheads]
972 972 # fallback to independent pushkey command
973 973 for newremotehead in outdated:
974 974 r = pushop.remote.pushkey('phases',
975 975 newremotehead.hex(),
976 976 str(phases.draft),
977 977 str(phases.public))
978 978 if not r:
979 979 pushop.ui.warn(_('updating %s to public failed!\n')
980 980 % newremotehead)
981 981
982 982 def _localphasemove(pushop, nodes, phase=phases.public):
983 983 """move <nodes> to <phase> in the local source repo"""
984 984 if pushop.trmanager:
985 985 phases.advanceboundary(pushop.repo,
986 986 pushop.trmanager.transaction(),
987 987 phase,
988 988 nodes)
989 989 else:
990 990 # repo is not locked, do not change any phases!
991 991 # Informs the user that phases should have been moved when
992 992 # applicable.
993 993 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
994 994 phasestr = phases.phasenames[phase]
995 995 if actualmoves:
996 996 pushop.ui.status(_('cannot lock source repo, skipping '
997 997 'local %s phase update\n') % phasestr)
998 998
999 999 def _pushobsolete(pushop):
1000 1000 """utility function to push obsolete markers to a remote"""
1001 1001 if 'obsmarkers' in pushop.stepsdone:
1002 1002 return
1003 1003 repo = pushop.repo
1004 1004 remote = pushop.remote
1005 1005 pushop.stepsdone.add('obsmarkers')
1006 1006 if pushop.outobsmarkers:
1007 1007 pushop.ui.debug('try to push obsolete markers to remote\n')
1008 1008 rslts = []
1009 1009 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1010 1010 for key in sorted(remotedata, reverse=True):
1011 1011 # reverse sort to ensure we end with dump0
1012 1012 data = remotedata[key]
1013 1013 rslts.append(remote.pushkey('obsolete', key, '', data))
1014 1014 if [r for r in rslts if not r]:
1015 1015 msg = _('failed to push some obsolete markers!\n')
1016 1016 repo.ui.warn(msg)
1017 1017
1018 1018 def _pushbookmark(pushop):
1019 1019 """Update bookmark position on remote"""
1020 1020 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1021 1021 return
1022 1022 pushop.stepsdone.add('bookmarks')
1023 1023 ui = pushop.ui
1024 1024 remote = pushop.remote
1025 1025
1026 1026 for b, old, new in pushop.outbookmarks:
1027 1027 action = 'update'
1028 1028 if not old:
1029 1029 action = 'export'
1030 1030 elif not new:
1031 1031 action = 'delete'
1032 1032 if remote.pushkey('bookmarks', b, old, new):
1033 1033 ui.status(bookmsgmap[action][0] % b)
1034 1034 else:
1035 1035 ui.warn(bookmsgmap[action][1] % b)
1036 1036 # discovery can have set the value from an invalid entry
1037 1037 if pushop.bkresult is not None:
1038 1038 pushop.bkresult = 1
1039 1039
1040 1040 class pulloperation(object):
1041 1041 """A object that represent a single pull operation
1042 1042
1043 1043 It purpose is to carry pull related state and very common operation.
1044 1044
1045 1045 A new should be created at the beginning of each pull and discarded
1046 1046 afterward.
1047 1047 """
1048 1048
1049 1049 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1050 1050 remotebookmarks=None, streamclonerequested=None):
1051 1051 # repo we pull into
1052 1052 self.repo = repo
1053 1053 # repo we pull from
1054 1054 self.remote = remote
1055 1055 # revision we try to pull (None is "all")
1056 1056 self.heads = heads
1057 1057 # bookmark pulled explicitly
1058 1058 self.explicitbookmarks = bookmarks
1059 1059 # do we force pull?
1060 1060 self.force = force
1061 1061 # whether a streaming clone was requested
1062 1062 self.streamclonerequested = streamclonerequested
1063 1063 # transaction manager
1064 1064 self.trmanager = None
1065 1065 # set of common changeset between local and remote before pull
1066 1066 self.common = None
1067 1067 # set of pulled head
1068 1068 self.rheads = None
1069 1069 # list of missing changeset to fetch remotely
1070 1070 self.fetch = None
1071 1071 # remote bookmarks data
1072 1072 self.remotebookmarks = remotebookmarks
1073 1073 # result of changegroup pulling (used as return code by pull)
1074 1074 self.cgresult = None
1075 1075 # list of step already done
1076 1076 self.stepsdone = set()
1077 1077 # Whether we attempted a clone from pre-generated bundles.
1078 1078 self.clonebundleattempted = False
1079 1079
1080 1080 @util.propertycache
1081 1081 def pulledsubset(self):
1082 1082 """heads of the set of changeset target by the pull"""
1083 1083 # compute target subset
1084 1084 if self.heads is None:
1085 1085 # We pulled everything possible
1086 1086 # sync on everything common
1087 1087 c = set(self.common)
1088 1088 ret = list(self.common)
1089 1089 for n in self.rheads:
1090 1090 if n not in c:
1091 1091 ret.append(n)
1092 1092 return ret
1093 1093 else:
1094 1094 # We pulled a specific subset
1095 1095 # sync on this subset
1096 1096 return self.heads
1097 1097
1098 1098 @util.propertycache
1099 1099 def canusebundle2(self):
1100 1100 return _canusebundle2(self)
1101 1101
1102 1102 @util.propertycache
1103 1103 def remotebundle2caps(self):
1104 1104 return bundle2.bundle2caps(self.remote)
1105 1105
1106 1106 def gettransaction(self):
1107 1107 # deprecated; talk to trmanager directly
1108 1108 return self.trmanager.transaction()
1109 1109
1110 1110 class transactionmanager(object):
1111 1111 """An object to manage the life cycle of a transaction
1112 1112
1113 1113 It creates the transaction on demand and calls the appropriate hooks when
1114 1114 closing the transaction."""
1115 1115 def __init__(self, repo, source, url):
1116 1116 self.repo = repo
1117 1117 self.source = source
1118 1118 self.url = url
1119 1119 self._tr = None
1120 1120
1121 1121 def transaction(self):
1122 1122 """Return an open transaction object, constructing if necessary"""
1123 1123 if not self._tr:
1124 1124 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1125 1125 self._tr = self.repo.transaction(trname)
1126 1126 self._tr.hookargs['source'] = self.source
1127 1127 self._tr.hookargs['url'] = self.url
1128 1128 return self._tr
1129 1129
1130 1130 def close(self):
1131 1131 """close transaction if created"""
1132 1132 if self._tr is not None:
1133 1133 self._tr.close()
1134 1134
1135 1135 def release(self):
1136 1136 """release transaction if created"""
1137 1137 if self._tr is not None:
1138 1138 self._tr.release()
1139 1139
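The life cycle described above looks like this in practice (mirroring what pull() does right below; the try/finally ordering is the important part):

    trmanager = transactionmanager(repo, 'pull', remote.url())
    try:
        # work that may lazily call trmanager.transaction()
        trmanager.close()      # commits the transaction, if one was opened
    finally:
        trmanager.release()    # rolls back if close() was never reached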
1140 1140 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1141 1141 streamclonerequested=None):
1142 1142 """Fetch repository data from a remote.
1143 1143
1144 1144 This is the main function used to retrieve data from a remote repository.
1145 1145
1146 1146 ``repo`` is the local repository to clone into.
1147 1147 ``remote`` is a peer instance.
1148 1148 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1149 1149 default) means to pull everything from the remote.
1150 1150 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1151 1151 default, all remote bookmarks are pulled.
1152 1152 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1153 1153 initialization.
1154 1154 ``streamclonerequested`` is a boolean indicating whether a "streaming
1155 1155 clone" is requested. A "streaming clone" is essentially a raw file copy
1156 1156 of revlogs from the server. This only works when the local repository is
1157 1157 empty. The default value of ``None`` means to respect the server
1158 1158 configuration for preferring stream clones.
1159 1159
1160 1160 Returns the ``pulloperation`` created for this pull.
1161 1161 """
1162 1162 if opargs is None:
1163 1163 opargs = {}
1164 1164 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1165 1165 streamclonerequested=streamclonerequested, **opargs)
1166 1166 if pullop.remote.local():
1167 1167 missing = set(pullop.remote.requirements) - pullop.repo.supported
1168 1168 if missing:
1169 1169 msg = _("required features are not"
1170 1170 " supported in the destination:"
1171 1171 " %s") % (', '.join(sorted(missing)))
1172 1172 raise error.Abort(msg)
1173 1173
1174 1174 lock = pullop.repo.lock()
1175 1175 try:
1176 1176 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1177 1177 streamclone.maybeperformlegacystreamclone(pullop)
1178 1178 # This should ideally be in _pullbundle2(). However, it needs to run
1179 1179 # before discovery to avoid extra work.
1180 1180 _maybeapplyclonebundle(pullop)
1181 1181 _pulldiscovery(pullop)
1182 1182 if pullop.canusebundle2:
1183 1183 _pullbundle2(pullop)
1184 1184 _pullchangeset(pullop)
1185 1185 _pullphase(pullop)
1186 1186 _pullbookmarks(pullop)
1187 1187 _pullobsolete(pullop)
1188 1188 pullop.trmanager.close()
1189 1189 finally:
1190 1190 pullop.trmanager.release()
1191 1191 lock.release()
1192 1192
1193 1193 return pullop
1194 1194
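A hedged usage sketch of the API documented above (peer creation and URL are illustrative assumptions):

    from mercurial import exchange, hg

    other = hg.peer(repo, {}, 'https://hg.example.com/repo')
    pullop = exchange.pull(repo, other, heads=None)   # None pulls everything
    if pullop.cgresult == 0:
        repo.ui.status('no changes found\n')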
1195 1195 # list of steps to perform discovery before pull
1196 1196 pulldiscoveryorder = []
1197 1197
1198 1198 # Mapping between step name and function
1199 1199 #
1200 1200 # This exists to help extensions wrap steps if necessary
1201 1201 pulldiscoverymapping = {}
1202 1202
1203 1203 def pulldiscovery(stepname):
1204 1204 """decorator for function performing discovery before pull
1205 1205
1206 1206 The function is added to the step -> function mapping and appended to the
1207 1207 list of steps. Beware that decorated functions will be added in order (this
1208 1208 may matter).
1209 1209
1210 1210 You can only use this decorator for a new step, if you want to wrap a step
1211 1211 from an extension, change the pulldiscovery dictionary directly."""
1212 1212 def dec(func):
1213 1213 assert stepname not in pulldiscoverymapping
1214 1214 pulldiscoverymapping[stepname] = func
1215 1215 pulldiscoveryorder.append(stepname)
1216 1216 return func
1217 1217 return dec
1218 1218
1219 1219 def _pulldiscovery(pullop):
1220 1220 """Run all discovery steps"""
1221 1221 for stepname in pulldiscoveryorder:
1222 1222 step = pulldiscoverymapping[stepname]
1223 1223 step(pullop)
1224 1224
1225 1225 @pulldiscovery('b1:bookmarks')
1226 1226 def _pullbookmarkbundle1(pullop):
1227 1227 """fetch bookmark data in bundle1 case
1228 1228
1229 1229 If not using bundle2, we have to fetch bookmarks before changeset
1230 1230 discovery to reduce the chance and impact of race conditions."""
1231 1231 if pullop.remotebookmarks is not None:
1232 1232 return
1233 1233 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1234 1234 # all known bundle2 servers now support listkeys, but let's be nice with
1235 1235 # new implementations.
1236 1236 return
1237 1237 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1238 1238
1239 1239
1240 1240 @pulldiscovery('changegroup')
1241 1241 def _pulldiscoverychangegroup(pullop):
1242 1242 """discovery phase for the pull
1243 1243
1244 1244 Currently handles changeset discovery only; will change to handle all
1245 1245 discovery at some point."""
1246 1246 tmp = discovery.findcommonincoming(pullop.repo,
1247 1247 pullop.remote,
1248 1248 heads=pullop.heads,
1249 1249 force=pullop.force)
1250 1250 common, fetch, rheads = tmp
1251 1251 nm = pullop.repo.unfiltered().changelog.nodemap
1252 1252 if fetch and rheads:
1253 1253 # If a remote head is filtered locally, let's drop it from the unknown
1254 1254 # remote heads and put it back in common.
1255 1255 #
1256 1256 # This is a hackish solution to catch most of the "common but locally
1257 1257 # hidden" situations. We do not perform discovery on the unfiltered
1258 1258 # repository because it ends up doing a pathological amount of round
1259 1259 # trips for a huge amount of changesets we do not care about.
1260 1260 #
1261 1261 # If a set of such "common but filtered" changesets exists on the server
1262 1262 # but does not include a remote head, we'll not be able to detect it,
1263 1263 scommon = set(common)
1264 1264 filteredrheads = []
1265 1265 for n in rheads:
1266 1266 if n in nm:
1267 1267 if n not in scommon:
1268 1268 common.append(n)
1269 1269 else:
1270 1270 filteredrheads.append(n)
1271 1271 if not filteredrheads:
1272 1272 fetch = []
1273 1273 rheads = filteredrheads
1274 1274 pullop.common = common
1275 1275 pullop.fetch = fetch
1276 1276 pullop.rheads = rheads
1277 1277
1278 1278 def _pullbundle2(pullop):
1279 1279 """pull data using bundle2
1280 1280
1281 1281 For now, the only supported data are changegroup."""
1282 1282 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1283 1283
1284 1284 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1285 1285
1286 1286 # pulling changegroup
1287 1287 pullop.stepsdone.add('changegroup')
1288 1288
1289 1289 kwargs['common'] = pullop.common
1290 1290 kwargs['heads'] = pullop.heads or pullop.rheads
1291 1291 kwargs['cg'] = pullop.fetch
1292 1292 if 'listkeys' in pullop.remotebundle2caps:
1293 kwargs['listkeys'] = ['phase']
1293 kwargs['listkeys'] = ['phases']
1294 1294 if pullop.remotebookmarks is None:
1295 1295 # make sure to always include bookmark data when migrating
1296 1296 # `hg incoming --bundle` to using this function.
1297 1297 kwargs['listkeys'].append('bookmarks')
1298 1298
1299 1299 # If this is a full pull / clone and the server supports the clone bundles
1300 1300 # feature, tell the server whether we attempted a clone bundle. The
1301 1301 # presence of this flag indicates the client supports clone bundles. This
1302 1302 # will enable the server to treat clients that support clone bundles
1303 1303 # differently from those that don't.
1304 1304 if (pullop.remote.capable('clonebundles')
1305 1305 and pullop.heads is None and list(pullop.common) == [nullid]):
1306 1306 kwargs['cbattempted'] = pullop.clonebundleattempted
1307 1307
1308 1308 if streaming:
1309 1309 pullop.repo.ui.status(_('streaming all changes\n'))
1310 1310 elif not pullop.fetch:
1311 1311 pullop.repo.ui.status(_("no changes found\n"))
1312 1312 pullop.cgresult = 0
1313 1313 else:
1314 1314 if pullop.heads is None and list(pullop.common) == [nullid]:
1315 1315 pullop.repo.ui.status(_("requesting all changes\n"))
1316 1316 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1317 1317 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1318 1318 if obsolete.commonversion(remoteversions) is not None:
1319 1319 kwargs['obsmarkers'] = True
1320 1320 pullop.stepsdone.add('obsmarkers')
1321 1321 _pullbundle2extraprepare(pullop, kwargs)
1322 1322 bundle = pullop.remote.getbundle('pull', **kwargs)
1323 1323 try:
1324 1324 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1325 1325 except error.BundleValueError as exc:
1326 1326 raise error.Abort('missing support for %s' % exc)
1327 1327
1328 1328 if pullop.fetch:
1329 1329 results = [cg['return'] for cg in op.records['changegroup']]
1330 1330 pullop.cgresult = changegroup.combineresults(results)
1331 1331
1332 1332 # processing phases change
1333 1333 for namespace, value in op.records['listkeys']:
1334 1334 if namespace == 'phases':
1335 1335 _pullapplyphases(pullop, value)
1336 1336
1337 1337 # processing bookmark update
1338 1338 for namespace, value in op.records['listkeys']:
1339 1339 if namespace == 'bookmarks':
1340 1340 pullop.remotebookmarks = value
1341 1341
1342 1342 # bookmark data were either already there or pulled in the bundle
1343 1343 if pullop.remotebookmarks is not None:
1344 1344 _pullbookmarks(pullop)
1345 1345
1346 1346 def _pullbundle2extraprepare(pullop, kwargs):
1347 1347 """hook function so that extensions can extend the getbundle call"""
1348 1348 pass
1349 1349
1350 1350 def _pullchangeset(pullop):
1351 1351 """pull changeset from unbundle into the local repo"""
1352 1352 # We delay opening the transaction as late as possible so we
1353 1353 # don't open a transaction for nothing or break a future useful
1354 1354 # rollback call
1355 1355 if 'changegroup' in pullop.stepsdone:
1356 1356 return
1357 1357 pullop.stepsdone.add('changegroup')
1358 1358 if not pullop.fetch:
1359 1359 pullop.repo.ui.status(_("no changes found\n"))
1360 1360 pullop.cgresult = 0
1361 1361 return
1362 1362 pullop.gettransaction()
1363 1363 if pullop.heads is None and list(pullop.common) == [nullid]:
1364 1364 pullop.repo.ui.status(_("requesting all changes\n"))
1365 1365 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1366 1366 # issue1320, avoid a race if remote changed after discovery
1367 1367 pullop.heads = pullop.rheads
1368 1368
1369 1369 if pullop.remote.capable('getbundle'):
1370 1370 # TODO: get bundlecaps from remote
1371 1371 cg = pullop.remote.getbundle('pull', common=pullop.common,
1372 1372 heads=pullop.heads or pullop.rheads)
1373 1373 elif pullop.heads is None:
1374 1374 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1375 1375 elif not pullop.remote.capable('changegroupsubset'):
1376 1376 raise error.Abort(_("partial pull cannot be done because "
1377 1377 "other repository doesn't support "
1378 1378 "changegroupsubset."))
1379 1379 else:
1380 1380 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1381 1381 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1382 1382
1383 1383 def _pullphase(pullop):
1384 1384 # Get remote phases data from remote
1385 1385 if 'phases' in pullop.stepsdone:
1386 1386 return
1387 1387 remotephases = pullop.remote.listkeys('phases')
1388 1388 _pullapplyphases(pullop, remotephases)
1389 1389
1390 1390 def _pullapplyphases(pullop, remotephases):
1391 1391 """apply phase movement from observed remote state"""
1392 1392 if 'phases' in pullop.stepsdone:
1393 1393 return
1394 1394 pullop.stepsdone.add('phases')
1395 1395 publishing = bool(remotephases.get('publishing', False))
1396 1396 if remotephases and not publishing:
1397 1397 # remote is new and unpublishing
1398 1398 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1399 1399 pullop.pulledsubset,
1400 1400 remotephases)
1401 1401 dheads = pullop.pulledsubset
1402 1402 else:
1403 1403 # Remote is old or publishing; all common changesets
1404 1404 # should be seen as public
1405 1405 pheads = pullop.pulledsubset
1406 1406 dheads = []
1407 1407 unfi = pullop.repo.unfiltered()
1408 1408 phase = unfi._phasecache.phase
1409 1409 rev = unfi.changelog.nodemap.get
1410 1410 public = phases.public
1411 1411 draft = phases.draft
1412 1412
1413 1413 # exclude changesets already public locally and update the others
1414 1414 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1415 1415 if pheads:
1416 1416 tr = pullop.gettransaction()
1417 1417 phases.advanceboundary(pullop.repo, tr, public, pheads)
1418 1418
1419 1419 # exclude changesets already draft locally and update the others
1420 1420 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1421 1421 if dheads:
1422 1422 tr = pullop.gettransaction()
1423 1423 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1424 1424
1425 1425 def _pullbookmarks(pullop):
1426 1426 """process the remote bookmark information to update the local one"""
1427 1427 if 'bookmarks' in pullop.stepsdone:
1428 1428 return
1429 1429 pullop.stepsdone.add('bookmarks')
1430 1430 repo = pullop.repo
1431 1431 remotebookmarks = pullop.remotebookmarks
1432 1432 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1433 1433 pullop.remote.url(),
1434 1434 pullop.gettransaction,
1435 1435 explicit=pullop.explicitbookmarks)
1436 1436
1437 1437 def _pullobsolete(pullop):
1438 1438 """utility function to pull obsolete markers from a remote
1439 1439
1440 1440 The `gettransaction` is a function that returns the pull transaction, creating
1441 1441 one if necessary. We return the transaction to inform the calling code that
1442 1442 a new transaction has been created (when applicable).
1443 1443
1444 1444 Exists mostly to allow overriding for experimentation purposes"""
1445 1445 if 'obsmarkers' in pullop.stepsdone:
1446 1446 return
1447 1447 pullop.stepsdone.add('obsmarkers')
1448 1448 tr = None
1449 1449 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1450 1450 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1451 1451 remoteobs = pullop.remote.listkeys('obsolete')
1452 1452 if 'dump0' in remoteobs:
1453 1453 tr = pullop.gettransaction()
1454 1454 markers = []
1455 1455 for key in sorted(remoteobs, reverse=True):
1456 1456 if key.startswith('dump'):
1457 1457 data = base85.b85decode(remoteobs[key])
1458 1458 version, newmarks = obsolete._readmarkers(data)
1459 1459 markers += newmarks
1460 1460 if markers:
1461 1461 pullop.repo.obsstore.add(tr, markers)
1462 1462 pullop.repo.invalidatevolatilesets()
1463 1463 return tr
1464 1464
1465 1465 def caps20to10(repo):
1466 1466 """return a set with appropriate options to use bundle20 during getbundle"""
1467 1467 caps = set(['HG20'])
1468 1468 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1469 1469 caps.add('bundle2=' + urlreq.quote(capsblob))
1470 1470 return caps
1471 1471
1472 1472 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1473 1473 getbundle2partsorder = []
1474 1474
1475 1475 # Mapping between step name and function
1476 1476 #
1477 1477 # This exists to help extensions wrap steps if necessary
1478 1478 getbundle2partsmapping = {}
1479 1479
1480 1480 def getbundle2partsgenerator(stepname, idx=None):
1481 1481 """decorator for functions generating bundle2 parts for getbundle
1482 1482
1483 1483 The function is added to the step -> function mapping and appended to the
1484 1484 list of steps. Beware that decorated functions will be added in order
1485 1485 (this may matter).
1486 1486
1487 1487 You can only use this decorator for new steps; if you want to wrap a step
1488 1488 from an extension, modify the getbundle2partsmapping dictionary directly."""
1489 1489 def dec(func):
1490 1490 assert stepname not in getbundle2partsmapping
1491 1491 getbundle2partsmapping[stepname] = func
1492 1492 if idx is None:
1493 1493 getbundle2partsorder.append(stepname)
1494 1494 else:
1495 1495 getbundle2partsorder.insert(idx, stepname)
1496 1496 return func
1497 1497 return dec
1498 1498
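# A minimal sketch (not part of this change) of how an extension could use
# the decorator above to register an extra step; the 'exp-example' part name
# and payload are hypothetical:
#
#   @getbundle2partsgenerator('exp-example')
#   def _getbundleexamplepart(bundler, repo, source, bundlecaps=None,
#                             b2caps=None, **kwargs):
#       """add a hypothetical example part to the requested bundle"""
#       if 'exp-example' not in b2caps:
#           return
#       bundler.newpart('exp-example', data=repo.root, mandatory=False)
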
1499 1499 def bundle2requested(bundlecaps):
1500 1500 if bundlecaps is not None:
1501 1501 return any(cap.startswith('HG2') for cap in bundlecaps)
1502 1502 return False
1503 1503
1504 1504 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1505 1505 **kwargs):
1506 1506 """return a full bundle (with potentially multiple kinds of parts)
1507 1507 
1508 1508 Could be a bundle HG10 or a bundle HG20 depending on the bundlecaps
1509 1509 passed. For now, the bundle can only contain a changegroup, but this will
1510 1510 change when more part types become available for bundle2.
1511 1511 
1512 1512 This is different from changegroup.getchangegroup, which only returns an HG10
1513 1513 changegroup bundle. They may eventually get reunited in the future when we
1514 1514 have a clearer idea of the API we want to use to query different data.
1515 1515 
1516 1516 The implementation is at a very early stage and will get massive rework
1517 1517 when the bundle API is refined.
1518 1518 """
1519 1519 usebundle2 = bundle2requested(bundlecaps)
1520 1520 # bundle10 case
1521 1521 if not usebundle2:
1522 1522 if bundlecaps and not kwargs.get('cg', True):
1523 1523 raise ValueError(_('request for bundle10 must include changegroup'))
1524 1524
1525 1525 if kwargs:
1526 1526 raise ValueError(_('unsupported getbundle arguments: %s')
1527 1527 % ', '.join(sorted(kwargs.keys())))
1528 1528 return changegroup.getchangegroup(repo, source, heads=heads,
1529 1529 common=common, bundlecaps=bundlecaps)
1530 1530
1531 1531 # bundle20 case
1532 1532 b2caps = {}
1533 1533 for bcaps in bundlecaps:
1534 1534 if bcaps.startswith('bundle2='):
1535 1535 blob = urlreq.unquote(bcaps[len('bundle2='):])
1536 1536 b2caps.update(bundle2.decodecaps(blob))
1537 1537 bundler = bundle2.bundle20(repo.ui, b2caps)
1538 1538
1539 1539 kwargs['heads'] = heads
1540 1540 kwargs['common'] = common
1541 1541
1542 1542 for name in getbundle2partsorder:
1543 1543 func = getbundle2partsmapping[name]
1544 1544 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1545 1545 **kwargs)
1546 1546
1547 1547 return util.chunkbuffer(bundler.getchunks())
1548 1548
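# A hypothetical server-side invocation (values are placeholders); this is
# roughly what the wire protocol layer passes through after decoding a
# client's getbundle request, including the listkeys namespaces it wants
# bundled (e.g. phases and bookmarks) instead of querying them separately:
#
#   chunks = getbundle(repo, 'serve', heads=clientheads, common=commonnodes,
#                      bundlecaps=clientcaps, cg=True,
#                      listkeys=['phases', 'bookmarks'])
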
1549 1549 @getbundle2partsgenerator('changegroup')
1550 1550 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1551 1551 b2caps=None, heads=None, common=None, **kwargs):
1552 1552 """add a changegroup part to the requested bundle"""
1553 1553 cg = None
1554 1554 if kwargs.get('cg', True):
1555 1555 # build changegroup bundle here.
1556 1556 version = '01'
1557 1557 cgversions = b2caps.get('changegroup')
1558 1558 if cgversions: # 3.1 and 3.2 ship with an empty value
1559 1559 cgversions = [v for v in cgversions
1560 1560 if v in changegroup.supportedoutgoingversions(repo)]
1561 1561 if not cgversions:
1562 1562 raise ValueError(_('no common changegroup version'))
1563 1563 version = max(cgversions)
1564 1564 outgoing = changegroup.computeoutgoing(repo, heads, common)
1565 1565 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1566 1566 bundlecaps=bundlecaps,
1567 1567 version=version)
1568 1568
1569 1569 if cg:
1570 1570 part = bundler.newpart('changegroup', data=cg)
1571 1571 if cgversions:
1572 1572 part.addparam('version', version)
1573 1573 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1574 1574 if 'treemanifest' in repo.requirements:
1575 1575 part.addparam('treemanifest', '1')
1576 1576
1577 1577 @getbundle2partsgenerator('listkeys')
1578 1578 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1579 1579 b2caps=None, **kwargs):
1580 1580 """add parts containing listkeys namespaces to the requested bundle"""
1581 1581 listkeys = kwargs.get('listkeys', ())
1582 1582 for namespace in listkeys:
1583 1583 part = bundler.newpart('listkeys')
1584 1584 part.addparam('namespace', namespace)
1585 1585 keys = repo.listkeys(namespace).items()
1586 1586 part.data = pushkey.encodekeys(keys)
1587 1587
1588 1588 @getbundle2partsgenerator('obsmarkers')
1589 1589 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1590 1590 b2caps=None, heads=None, **kwargs):
1591 1591 """add an obsolescence markers part to the requested bundle"""
1592 1592 if kwargs.get('obsmarkers', False):
1593 1593 if heads is None:
1594 1594 heads = repo.heads()
1595 1595 subset = [c.node() for c in repo.set('::%ln', heads)]
1596 1596 markers = repo.obsstore.relevantmarkers(subset)
1597 1597 markers = sorted(markers)
1598 1598 buildobsmarkerspart(bundler, markers)
1599 1599
1600 1600 @getbundle2partsgenerator('hgtagsfnodes')
1601 1601 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1602 1602 b2caps=None, heads=None, common=None,
1603 1603 **kwargs):
1604 1604 """Transfer the .hgtags filenodes mapping.
1605 1605
1606 1606 Only values for heads in this bundle will be transferred.
1607 1607
1608 1608 The part data consists of pairs of 20 byte changeset node and .hgtags
1609 1609 filenodes raw values.
1610 1610 """
1611 1611 # Don't send unless:
1612 1612 # - changesets are being exchanged,
1613 1613 # - the client supports it.
1614 1614 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1615 1615 return
1616 1616
1617 1617 outgoing = changegroup.computeoutgoing(repo, heads, common)
1618 1618
1619 1619 if not outgoing.missingheads:
1620 1620 return
1621 1621
1622 1622 cache = tags.hgtagsfnodescache(repo.unfiltered())
1623 1623 chunks = []
1624 1624
1625 1625 # .hgtags fnodes are only relevant for head changesets. While we could
1626 1626 # transfer values for all known nodes, there will likely be little to
1627 1627 # no benefit.
1628 1628 #
1629 1629 # We don't bother using a generator to produce output data because
1630 1630 # a) we only have 40 bytes per head and even esoteric numbers of heads
1631 1631 # consume little memory (1M heads is 40MB) b) we don't want to send the
1632 1632 # part if we don't have entries and knowing if we have entries requires
1633 1633 # cache lookups.
1634 1634 for node in outgoing.missingheads:
1635 1635 # Don't compute missing, as this may slow down serving.
1636 1636 fnode = cache.getfnode(node, computemissing=False)
1637 1637 if fnode is not None:
1638 1638 chunks.extend([node, fnode])
1639 1639
1640 1640 if chunks:
1641 1641 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1642 1642
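# The payload built above is a flat concatenation of fixed-size records:
# 20 bytes of changeset node followed by 20 bytes of .hgtags filenode. A
# receiver can therefore walk it in 40-byte steps (sketch):
#
#   for offset in range(0, len(data), 40):
#       cnode = data[offset:offset + 20]
#       fnode = data[offset + 20:offset + 40]
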
1643 1643 def check_heads(repo, their_heads, context):
1644 1644 """check if the heads of a repo have been modified
1645 1645
1646 1646 Used by peer for unbundling.
1647 1647 """
1648 1648 heads = repo.heads()
1649 1649 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1650 1650 if not (their_heads == ['force'] or their_heads == heads or
1651 1651 their_heads == ['hashed', heads_hash]):
1652 1652 # someone else committed/pushed/unbundled while we
1653 1653 # were transferring data
1654 1654 raise error.PushRaced('repository changed while %s - '
1655 1655 'please try again' % context)
1656 1656
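# For reference, a pushing client that supports the 'unbundlehash' scheme
# builds the matching check value from the heads it observed, mirroring the
# hash computed above (sketch; 'observedheads' is a placeholder name):
#
#   their_heads = ['hashed',
#                  util.sha1(''.join(sorted(observedheads))).digest()]
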
1657 1657 def unbundle(repo, cg, heads, source, url):
1658 1658 """Apply a bundle to a repo.
1659 1659
1660 1660 This function makes sure the repo is locked during the application and has a
1661 1661 mechanism to check that no push race occurred between the creation of the
1662 1662 bundle and its application.
1663 1663 
1664 1664 If the push was raced, a PushRaced exception is raised."""
1665 1665 r = 0
1666 1666 # need a transaction when processing a bundle2 stream
1667 1667 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1668 1668 lockandtr = [None, None, None]
1669 1669 recordout = None
1670 1670 # quick fix for output mismatch with bundle2 in 3.4
1671 1671 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1672 1672 False)
1673 1673 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1674 1674 captureoutput = True
1675 1675 try:
1676 1676 check_heads(repo, heads, 'uploading changes')
1677 1677 # push can proceed
1678 1678 if util.safehasattr(cg, 'params'):
1679 1679 r = None
1680 1680 try:
1681 1681 def gettransaction():
1682 1682 if not lockandtr[2]:
1683 1683 lockandtr[0] = repo.wlock()
1684 1684 lockandtr[1] = repo.lock()
1685 1685 lockandtr[2] = repo.transaction(source)
1686 1686 lockandtr[2].hookargs['source'] = source
1687 1687 lockandtr[2].hookargs['url'] = url
1688 1688 lockandtr[2].hookargs['bundle2'] = '1'
1689 1689 return lockandtr[2]
1690 1690
1691 1691 # Do greedy locking by default until we're satisfied with lazy
1692 1692 # locking.
1693 1693 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1694 1694 gettransaction()
1695 1695
1696 1696 op = bundle2.bundleoperation(repo, gettransaction,
1697 1697 captureoutput=captureoutput)
1698 1698 try:
1699 1699 op = bundle2.processbundle(repo, cg, op=op)
1700 1700 finally:
1701 1701 r = op.reply
1702 1702 if captureoutput and r is not None:
1703 1703 repo.ui.pushbuffer(error=True, subproc=True)
1704 1704 def recordout(output):
1705 1705 r.newpart('output', data=output, mandatory=False)
1706 1706 if lockandtr[2] is not None:
1707 1707 lockandtr[2].close()
1708 1708 except BaseException as exc:
1709 1709 exc.duringunbundle2 = True
1710 1710 if captureoutput and r is not None:
1711 1711 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1712 1712 def recordout(output):
1713 1713 part = bundle2.bundlepart('output', data=output,
1714 1714 mandatory=False)
1715 1715 parts.append(part)
1716 1716 raise
1717 1717 else:
1718 1718 lockandtr[1] = repo.lock()
1719 1719 r = cg.apply(repo, source, url)
1720 1720 finally:
1721 1721 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1722 1722 if recordout is not None:
1723 1723 recordout(repo.ui.popbuffer())
1724 1724 return r
1725 1725
1726 1726 def _maybeapplyclonebundle(pullop):
1727 1727 """Apply a clone bundle from a remote, if possible."""
1728 1728
1729 1729 repo = pullop.repo
1730 1730 remote = pullop.remote
1731 1731
1732 1732 if not repo.ui.configbool('ui', 'clonebundles', True):
1733 1733 return
1734 1734
1735 1735 # Only run if local repo is empty.
1736 1736 if len(repo):
1737 1737 return
1738 1738
1739 1739 if pullop.heads:
1740 1740 return
1741 1741
1742 1742 if not remote.capable('clonebundles'):
1743 1743 return
1744 1744
1745 1745 res = remote._call('clonebundles')
1746 1746
1747 1747 # If we call the wire protocol command, that's good enough to record the
1748 1748 # attempt.
1749 1749 pullop.clonebundleattempted = True
1750 1750
1751 1751 entries = parseclonebundlesmanifest(repo, res)
1752 1752 if not entries:
1753 1753 repo.ui.note(_('no clone bundles available on remote; '
1754 1754 'falling back to regular clone\n'))
1755 1755 return
1756 1756
1757 1757 entries = filterclonebundleentries(repo, entries)
1758 1758 if not entries:
1759 1759 # There is a thundering herd concern here. However, if a server
1760 1760 # operator doesn't advertise bundles appropriate for its clients,
1761 1761 # they deserve what's coming. Furthermore, from a client's
1762 1762 # perspective, no automatic fallback would mean not being able to
1763 1763 # clone!
1764 1764 repo.ui.warn(_('no compatible clone bundles available on server; '
1765 1765 'falling back to regular clone\n'))
1766 1766 repo.ui.warn(_('(you may want to report this to the server '
1767 1767 'operator)\n'))
1768 1768 return
1769 1769
1770 1770 entries = sortclonebundleentries(repo.ui, entries)
1771 1771
1772 1772 url = entries[0]['URL']
1773 1773 repo.ui.status(_('applying clone bundle from %s\n') % url)
1774 1774 if trypullbundlefromurl(repo.ui, repo, url):
1775 1775 repo.ui.status(_('finished applying clone bundle\n'))
1776 1776 # Bundle failed.
1777 1777 #
1778 1778 # We abort by default to avoid the thundering herd of
1779 1779 # clients flooding a server that was expecting expensive
1780 1780 # clone load to be offloaded.
1781 1781 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1782 1782 repo.ui.warn(_('falling back to normal clone\n'))
1783 1783 else:
1784 1784 raise error.Abort(_('error applying bundle'),
1785 1785 hint=_('if this error persists, consider contacting '
1786 1786 'the server operator or disable clone '
1787 1787 'bundles via '
1788 1788 '"--config ui.clonebundles=false"'))
1789 1789
1790 1790 def parseclonebundlesmanifest(repo, s):
1791 1791 """Parses the raw text of a clone bundles manifest.
1792 1792
1793 1793 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1794 1794 to the URL; the other keys are the attributes for the entry.
1795 1795 """
1796 1796 m = []
1797 1797 for line in s.splitlines():
1798 1798 fields = line.split()
1799 1799 if not fields:
1800 1800 continue
1801 1801 attrs = {'URL': fields[0]}
1802 1802 for rawattr in fields[1:]:
1803 1803 key, value = rawattr.split('=', 1)
1804 1804 key = urlreq.unquote(key)
1805 1805 value = urlreq.unquote(value)
1806 1806 attrs[key] = value
1807 1807
1808 1808 # Parse BUNDLESPEC into components. This makes client-side
1809 1809 # preferences easier to specify since you can prefer a single
1810 1810 # component of the BUNDLESPEC.
1811 1811 if key == 'BUNDLESPEC':
1812 1812 try:
1813 1813 comp, version, params = parsebundlespec(repo, value,
1814 1814 externalnames=True)
1815 1815 attrs['COMPRESSION'] = comp
1816 1816 attrs['VERSION'] = version
1817 1817 except error.InvalidBundleSpecification:
1818 1818 pass
1819 1819 except error.UnsupportedBundleSpecification:
1820 1820 pass
1821 1821
1822 1822 m.append(attrs)
1823 1823
1824 1824 return m
1825 1825
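# Illustration (hypothetical URL): a manifest containing the single line
#
#   https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true
#
# parses to:
#
#   [{'URL': 'https://example.com/full.hg',
#     'BUNDLESPEC': 'gzip-v2', 'COMPRESSION': 'gzip', 'VERSION': 'v2',
#     'REQUIRESNI': 'true'}]
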
1826 1826 def filterclonebundleentries(repo, entries):
1827 1827 """Remove incompatible clone bundle manifest entries.
1828 1828
1829 1829 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1830 1830 and returns a new list consisting of only the entries that this client
1831 1831 should be able to apply.
1832 1832
1833 1833 There is no guarantee we'll be able to apply all returned entries because
1834 1834 the metadata we use to filter on may be missing or wrong.
1835 1835 """
1836 1836 newentries = []
1837 1837 for entry in entries:
1838 1838 spec = entry.get('BUNDLESPEC')
1839 1839 if spec:
1840 1840 try:
1841 1841 parsebundlespec(repo, spec, strict=True)
1842 1842 except error.InvalidBundleSpecification as e:
1843 1843 repo.ui.debug(str(e) + '\n')
1844 1844 continue
1845 1845 except error.UnsupportedBundleSpecification as e:
1846 1846 repo.ui.debug('filtering %s because unsupported bundle '
1847 1847 'spec: %s\n' % (entry['URL'], str(e)))
1848 1848 continue
1849 1849
1850 1850 if 'REQUIRESNI' in entry and not sslutil.hassni:
1851 1851 repo.ui.debug('filtering %s because SNI not supported\n' %
1852 1852 entry['URL'])
1853 1853 continue
1854 1854
1855 1855 newentries.append(entry)
1856 1856
1857 1857 return newentries
1858 1858
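# Illustration (hypothetical entries): given
#
#   [{'URL': 'http://a', 'BUNDLESPEC': 'xz-v1'},
#    {'URL': 'http://b', 'BUNDLESPEC': 'gzip-v2'}]
#
# the first entry is dropped because 'xz' is not a known compression
# (UnsupportedBundleSpecification), leaving only the 'gzip-v2' entry.
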
1859 1859 def sortclonebundleentries(ui, entries):
1860 1860 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1861 1861 if not prefers:
1862 1862 return list(entries)
1863 1863
1864 1864 prefers = [p.split('=', 1) for p in prefers]
1865 1865
1866 1866 # Our sort function.
1867 1867 def compareentry(a, b):
1868 1868 for prefkey, prefvalue in prefers:
1869 1869 avalue = a.get(prefkey)
1870 1870 bvalue = b.get(prefkey)
1871 1871
1872 1872 # Special case for b missing attribute and a matches exactly.
1873 1873 if avalue is not None and bvalue is None and avalue == prefvalue:
1874 1874 return -1
1875 1875
1876 1876 # Special case for a missing attribute and b matches exactly.
1877 1877 if bvalue is not None and avalue is None and bvalue == prefvalue:
1878 1878 return 1
1879 1879
1880 1880 # We can't compare unless attribute present on both.
1881 1881 if avalue is None or bvalue is None:
1882 1882 continue
1883 1883
1884 1884 # Same values should fall back to next attribute.
1885 1885 if avalue == bvalue:
1886 1886 continue
1887 1887
1888 1888 # Exact matches come first.
1889 1889 if avalue == prefvalue:
1890 1890 return -1
1891 1891 if bvalue == prefvalue:
1892 1892 return 1
1893 1893
1894 1894 # Fall back to next attribute.
1895 1895 continue
1896 1896
1897 1897 # If we got here we couldn't sort by attributes and prefers. Fall
1898 1898 # back to index order.
1899 1899 return 0
1900 1900
1901 1901 return sorted(entries, cmp=compareentry)
1902 1902
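# Sketch of how preferences shape the order (entries are hypothetical).
# With a configuration such as:
#
#   [ui]
#   clonebundleprefers = COMPRESSION=bzip2, VERSION=packed1
#
# entries advertising COMPRESSION=bzip2 sort before gzip ones; among equal
# COMPRESSION values the VERSION=packed1 entry wins, and entries matching
# none of the preferences keep their manifest order.
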
1903 1903 def trypullbundlefromurl(ui, repo, url):
1904 1904 """Attempt to apply a bundle from a URL."""
1905 1905 lock = repo.lock()
1906 1906 try:
1907 1907 tr = repo.transaction('bundleurl')
1908 1908 try:
1909 1909 try:
1910 1910 fh = urlmod.open(ui, url)
1911 1911 cg = readbundle(ui, fh, 'stream')
1912 1912
1913 1913 if isinstance(cg, bundle2.unbundle20):
1914 1914 bundle2.processbundle(repo, cg, lambda: tr)
1915 1915 elif isinstance(cg, streamclone.streamcloneapplier):
1916 1916 cg.apply(repo)
1917 1917 else:
1918 1918 cg.apply(repo, 'clonebundles', url)
1919 1919 tr.close()
1920 1920 return True
1921 1921 except urlerr.httperror as e:
1922 1922 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1923 1923 except urlerr.urlerror as e:
1924 1924 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1925 1925
1926 1926 return False
1927 1927 finally:
1928 1928 tr.release()
1929 1929 finally:
1930 1930 lock.release()
@@ -1,92 +1,90
1 1 #require serve
2 2
3 3 Initialize repository
4 4 the status call is to check for issue5130
5 5
6 6 $ hg init server
7 7 $ cd server
8 8 $ touch foo
9 9 $ hg -q commit -A -m initial
10 10 >>> for i in range(1024):
11 11 ... with open(str(i), 'wb') as fh:
12 12 ... fh.write(str(i))
13 13 $ hg -q commit -A -m 'add a lot of files'
14 14 $ hg st
15 15 $ hg serve -p $HGPORT -d --pid-file=hg.pid
16 16 $ cat hg.pid >> $DAEMON_PIDS
17 17 $ cd ..
18 18
19 19 Basic clone
20 20
21 21 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1
22 22 streaming all changes
23 23 1027 files to transfer, 96.3 KB of data
24 24 transferred 96.3 KB in * seconds (*/sec) (glob)
25 25 searching for changes
26 26 no changes found
27 27
28 28 Clone with background file closing enabled
29 29
30 30 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --uncompressed -U http://localhost:$HGPORT clone-background | grep -v adding
31 31 using http://localhost:$HGPORT/
32 32 sending capabilities command
33 33 sending branchmap command
34 34 streaming all changes
35 35 sending stream_out command
36 36 1027 files to transfer, 96.3 KB of data
37 37 starting 4 threads for background file closing
38 38 transferred 96.3 KB in * seconds (*/sec) (glob)
39 39 query 1; heads
40 40 sending batch command
41 41 searching for changes
42 42 all remote heads known locally
43 43 no changes found
44 44 sending getbundle command
45 45 bundle2-input-bundle: with-transaction
46 46 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
47 bundle2-input-part: total payload size 58
47 48 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
48 49 bundle2-input-bundle: 1 parts total
49 50 checking for updated bookmarks
50 preparing listkeys for "phases"
51 sending listkeys command
52 received listkey for "phases": 58 bytes
53 51
54 52
55 53 Stream clone while repo is changing:
56 54
57 55 $ mkdir changing
58 56 $ cd changing
59 57
60 58 extension for delaying the server process so we can reliably modify the repo
61 59 while cloning
62 60
63 61 $ cat > delayer.py <<EOF
64 62 > import time
65 63 > from mercurial import extensions, scmutil
66 64 > def __call__(orig, self, path, *args, **kwargs):
67 65 > if path == 'data/f1.i':
68 66 > time.sleep(2)
69 67 > return orig(self, path, *args, **kwargs)
70 68 > extensions.wrapfunction(scmutil.vfs, '__call__', __call__)
71 69 > EOF
72 70
73 71 prepare repo with a small and a big file to cover both code paths in emitrevlogdata
74 72
75 73 $ hg init repo
76 74 $ touch repo/f1
77 75 $ $TESTDIR/seq.py 50000 > repo/f2
78 76 $ hg -R repo ci -Aqm "0"
79 77 $ hg -R repo serve -p $HGPORT1 -d --pid-file=hg.pid --config extensions.delayer=delayer.py
80 78 $ cat hg.pid >> $DAEMON_PIDS
81 79
82 80 clone while modifying the repo between stat'ing the file with the write lock
83 81 held and actually serving the file content
84 82
85 83 $ hg clone -q --uncompressed -U http://localhost:$HGPORT1 clone &
86 84 $ sleep 1
87 85 $ echo >> repo/f1
88 86 $ echo >> repo/f2
89 87 $ hg -R repo ci -m "1"
90 88 $ wait
91 89 $ hg -R clone id
92 90 000000000000
@@ -1,434 +1,433
1 1 Set up a server
2 2
3 3 $ cat >> $HGRCPATH << EOF
4 4 > [format]
5 5 > usegeneraldelta=yes
6 6 > EOF
7 7 $ hg init server
8 8 $ cd server
9 9 $ cat >> .hg/hgrc << EOF
10 10 > [extensions]
11 11 > clonebundles =
12 12 > EOF
13 13
14 14 $ touch foo
15 15 $ hg -q commit -A -m 'add foo'
16 16 $ touch bar
17 17 $ hg -q commit -A -m 'add bar'
18 18
19 19 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
20 20 $ cat hg.pid >> $DAEMON_PIDS
21 21 $ cd ..
22 22
23 23 Missing manifest should not result in server lookup
24 24
25 25 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
26 26 requesting all changes
27 27 adding changesets
28 28 adding manifests
29 29 adding file changes
30 30 added 2 changesets with 2 changes to 2 files
31 31
32 32 $ cat server/access.log
33 33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
34 34 * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
35 * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=phase%2Cbookmarks (glob)
36 * - - [*] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases (glob)
35 * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=phases%2Cbookmarks (glob)
37 36
38 37 Empty manifest file results in retrieval
39 38 (the extension only checks if the manifest file exists)
40 39
41 40 $ touch server/.hg/clonebundles.manifest
42 41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
43 42 no clone bundles available on remote; falling back to regular clone
44 43 requesting all changes
45 44 adding changesets
46 45 adding manifests
47 46 adding file changes
48 47 added 2 changesets with 2 changes to 2 files
49 48
50 49 Manifest file with invalid URL aborts
51 50
52 51 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
53 52 $ hg clone http://localhost:$HGPORT 404-url
54 53 applying clone bundle from http://does.not.exist/bundle.hg
55 54 error fetching bundle: (.* not known|getaddrinfo failed|No address associated with hostname) (re)
56 55 abort: error applying bundle
57 56 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
58 57 [255]
59 58
60 59 Server not running aborts the clone
61 60
62 61 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
63 62 $ hg clone http://localhost:$HGPORT server-not-runner
64 63 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
65 64 error fetching bundle: * refused* (glob)
66 65 abort: error applying bundle
67 66 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
68 67 [255]
69 68
70 69 Server returns 404
71 70
72 71 $ python $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
73 72 $ cat http.pid >> $DAEMON_PIDS
74 73 $ hg clone http://localhost:$HGPORT running-404
75 74 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
76 75 HTTP error fetching bundle: HTTP Error 404: File not found
77 76 abort: error applying bundle
78 77 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
79 78 [255]
80 79
81 80 We can override failure to fall back to regular clone
82 81
83 82 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
84 83 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
85 84 HTTP error fetching bundle: HTTP Error 404: File not found
86 85 falling back to normal clone
87 86 requesting all changes
88 87 adding changesets
89 88 adding manifests
90 89 adding file changes
91 90 added 2 changesets with 2 changes to 2 files
92 91
93 92 Bundle with partial content works
94 93
95 94 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
96 95 1 changesets found
97 96
98 97 We verify exact bundle content as an extra check against accidental future
99 98 changes. If this output changes, we could break old clients.
100 99
101 100 $ f --size --hexdump partial.hg
102 101 partial.hg: size=207
103 102 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
104 103 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
105 104 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
106 105 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
107 106 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
108 107 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
109 108 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
110 109 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
111 110 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
112 111 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
113 112 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
114 113 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
115 114 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
116 115
117 116 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
118 117 $ hg clone -U http://localhost:$HGPORT partial-bundle
119 118 applying clone bundle from http://localhost:$HGPORT1/partial.hg
120 119 adding changesets
121 120 adding manifests
122 121 adding file changes
123 122 added 1 changesets with 1 changes to 1 files
124 123 finished applying clone bundle
125 124 searching for changes
126 125 adding changesets
127 126 adding manifests
128 127 adding file changes
129 128 added 1 changesets with 1 changes to 1 files
130 129
131 130 Incremental pull doesn't fetch bundle
132 131
133 132 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
134 133 adding changesets
135 134 adding manifests
136 135 adding file changes
137 136 added 1 changesets with 1 changes to 1 files
138 137
139 138 $ cd partial-clone
140 139 $ hg pull
141 140 pulling from http://localhost:$HGPORT/
142 141 searching for changes
143 142 adding changesets
144 143 adding manifests
145 144 adding file changes
146 145 added 1 changesets with 1 changes to 1 files
147 146 (run 'hg update' to get a working copy)
148 147 $ cd ..
149 148
150 149 Bundle with full content works
151 150
152 151 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
153 152 2 changesets found
154 153
155 154 Again, we perform an extra check against bundle content changes. If this content
156 155 changes, clone bundles produced by new Mercurial versions may not be readable
157 156 by old clients.
158 157
159 158 $ f --size --hexdump full.hg
160 159 full.hg: size=406
161 160 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
162 161 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 90 e5 76 f6 70 |ion=GZx.c``..v.p|
163 162 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 06 76 a6 b2 |.swu....`..F.v..|
164 163 0030: d4 a2 e2 cc fc 3c 03 23 06 06 e6 65 40 b1 4d c1 |.....<.#...e@.M.|
165 164 0040: 2a 31 09 cf 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 |*1...:R.........|
166 165 0050: 97 17 b2 c9 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 |...........%....|
167 166 0060: a4 a4 1a 5b 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 |...[X..'..Y..Y..|
168 167 0070: a4 59 26 5a 18 9a 18 59 5a 26 1a 27 27 25 99 a6 |.Y&Z...YZ&.''%..|
169 168 0080: 99 1a 70 95 a4 16 97 70 19 28 18 70 a5 e5 e7 73 |..p....p.(.p...s|
170 169 0090: 71 25 a6 a4 28 00 19 40 13 0e ac fa df ab ff 7b |q%..(..@.......{|
171 170 00a0: 3f fb 92 dc 8b 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 |?.....b......=ZD|
172 171 00b0: ac 2f b0 a9 c3 66 1e 54 b9 26 08 a7 1a 1b 1a a7 |./...f.T.&......|
173 172 00c0: 25 1b 9a 1b 99 19 9a 5a 18 9b a6 18 19 00 dd 67 |%......Z.......g|
174 173 00d0: 61 61 98 06 f4 80 49 4a 8a 65 52 92 41 9a 81 81 |aa....IJ.eR.A...|
175 174 00e0: a5 11 17 50 31 30 58 19 cc 80 98 25 29 b1 08 c4 |...P10X....%)...|
176 175 00f0: 37 07 79 19 88 d9 41 ee 07 8a 41 cd 5d 98 65 fb |7.y...A...A.].e.|
177 176 0100: e5 9e 45 bf 8d 7f 9f c6 97 9f 2b 44 34 67 d9 ec |..E.......+D4g..|
178 177 0110: 8e 0f a0 61 a8 eb 82 82 2e c9 c2 20 25 d5 34 c5 |...a....... %.4.|
179 178 0120: d0 d8 c2 dc d4 c2 d4 c4 30 d9 34 cd c0 d4 c8 cc |........0.4.....|
180 179 0130: 34 31 c5 d0 c4 24 31 c9 32 2d d1 c2 2c c5 30 25 |41...$1.2-..,.0%|
181 180 0140: 09 e4 ee 85 8f 85 ff 88 ab 89 36 c7 2a c4 47 34 |..........6.*.G4|
182 181 0150: fe f8 ec 7b 73 37 3f c3 24 62 1d 8d 4d 1d 9e 40 |...{s7?.$b..M..@|
183 182 0160: 06 3b 10 14 36 a4 38 10 04 d8 21 01 9a b1 83 f7 |.;..6.8...!.....|
184 183 0170: e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a 78 ed fc d5 |.E..V....R..x...|
185 184 0180: 76 f1 36 25 81 89 c7 ad ec 90 34 48 75 2b 89 49 |v.6%......4Hu+.I|
186 185 0190: bf 00 d6 97 f0 8d |......|
187 186
188 187 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
189 188 $ hg clone -U http://localhost:$HGPORT full-bundle
190 189 applying clone bundle from http://localhost:$HGPORT1/full.hg
191 190 adding changesets
192 191 adding manifests
193 192 adding file changes
194 193 added 2 changesets with 2 changes to 2 files
195 194 finished applying clone bundle
196 195 searching for changes
197 196 no changes found
198 197
199 198 Feature works over SSH
200 199
201 200 $ hg clone -U -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
202 201 applying clone bundle from http://localhost:$HGPORT1/full.hg
203 202 adding changesets
204 203 adding manifests
205 204 adding file changes
206 205 added 2 changesets with 2 changes to 2 files
207 206 finished applying clone bundle
208 207 searching for changes
209 208 no changes found
210 209
211 210 Entry with unknown BUNDLESPEC is filtered and not used
212 211
213 212 $ cat > server/.hg/clonebundles.manifest << EOF
214 213 > http://bad.entry1 BUNDLESPEC=UNKNOWN
215 214 > http://bad.entry2 BUNDLESPEC=xz-v1
216 215 > http://bad.entry3 BUNDLESPEC=none-v100
217 216 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
218 217 > EOF
219 218
220 219 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
221 220 applying clone bundle from http://localhost:$HGPORT1/full.hg
222 221 adding changesets
223 222 adding manifests
224 223 adding file changes
225 224 added 2 changesets with 2 changes to 2 files
226 225 finished applying clone bundle
227 226 searching for changes
228 227 no changes found
229 228
230 229 Automatic fallback when all entries are filtered
231 230
232 231 $ cat > server/.hg/clonebundles.manifest << EOF
233 232 > http://bad.entry BUNDLESPEC=UNKNOWN
234 233 > EOF
235 234
236 235 $ hg clone -U http://localhost:$HGPORT filter-all
237 236 no compatible clone bundles available on server; falling back to regular clone
238 237 (you may want to report this to the server operator)
239 238 requesting all changes
240 239 adding changesets
241 240 adding manifests
242 241 adding file changes
243 242 added 2 changesets with 2 changes to 2 files
244 243
245 244 URLs requiring SNI are filtered in Python <2.7.9
246 245
247 246 $ cp full.hg sni.hg
248 247 $ cat > server/.hg/clonebundles.manifest << EOF
249 248 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
250 249 > http://localhost:$HGPORT1/full.hg
251 250 > EOF
252 251
253 252 #if sslcontext
254 253 Python 2.7.9+ supports SNI
255 254
256 255 $ hg clone -U http://localhost:$HGPORT sni-supported
257 256 applying clone bundle from http://localhost:$HGPORT1/sni.hg
258 257 adding changesets
259 258 adding manifests
260 259 adding file changes
261 260 added 2 changesets with 2 changes to 2 files
262 261 finished applying clone bundle
263 262 searching for changes
264 263 no changes found
265 264 #else
266 265 Python <2.7.9 will filter SNI URLs
267 266
268 267 $ hg clone -U http://localhost:$HGPORT sni-unsupported
269 268 applying clone bundle from http://localhost:$HGPORT1/full.hg
270 269 adding changesets
271 270 adding manifests
272 271 adding file changes
273 272 added 2 changesets with 2 changes to 2 files
274 273 finished applying clone bundle
275 274 searching for changes
276 275 no changes found
277 276 #endif
278 277
279 278 Stream clone bundles are supported
280 279
281 280 $ hg -R server debugcreatestreamclonebundle packed.hg
282 281 writing 613 bytes for 4 files
283 282 bundle requirements: generaldelta, revlogv1
284 283
285 284 Entry with no bundle spec should work
286 285
287 286 $ cat > server/.hg/clonebundles.manifest << EOF
288 287 > http://localhost:$HGPORT1/packed.hg
289 288 > EOF
290 289
291 290 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
292 291 applying clone bundle from http://localhost:$HGPORT1/packed.hg
293 292 4 files to transfer, 613 bytes of data
294 293 transferred 613 bytes in *.* seconds (*) (glob)
295 294 finished applying clone bundle
296 295 searching for changes
297 296 no changes found
298 297
299 298 Bundle spec without parameters should work
300 299
301 300 $ cat > server/.hg/clonebundles.manifest << EOF
302 301 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
303 302 > EOF
304 303
305 304 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
306 305 applying clone bundle from http://localhost:$HGPORT1/packed.hg
307 306 4 files to transfer, 613 bytes of data
308 307 transferred 613 bytes in *.* seconds (*) (glob)
309 308 finished applying clone bundle
310 309 searching for changes
311 310 no changes found
312 311
313 312 Bundle spec with format requirements should work
314 313
315 314 $ cat > server/.hg/clonebundles.manifest << EOF
316 315 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
317 316 > EOF
318 317
319 318 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
320 319 applying clone bundle from http://localhost:$HGPORT1/packed.hg
321 320 4 files to transfer, 613 bytes of data
322 321 transferred 613 bytes in *.* seconds (*) (glob)
323 322 finished applying clone bundle
324 323 searching for changes
325 324 no changes found
326 325
327 326 Stream bundle spec with unknown requirements should be filtered out
328 327
329 328 $ cat > server/.hg/clonebundles.manifest << EOF
330 329 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
331 330 > EOF
332 331
333 332 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
334 333 no compatible clone bundles available on server; falling back to regular clone
335 334 (you may want to report this to the server operator)
336 335 requesting all changes
337 336 adding changesets
338 337 adding manifests
339 338 adding file changes
340 339 added 2 changesets with 2 changes to 2 files
341 340
342 341 Set up manifest for testing preferences
343 342 (Remember, the TYPE does not have to match reality - the URL is
344 343 important)
345 344
346 345 $ cp full.hg gz-a.hg
347 346 $ cp full.hg gz-b.hg
348 347 $ cp full.hg bz2-a.hg
349 348 $ cp full.hg bz2-b.hg
350 349 $ cat > server/.hg/clonebundles.manifest << EOF
351 350 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
352 351 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
353 352 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
354 353 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
355 354 > EOF
356 355
357 356 Preferring an undefined attribute will take first entry
358 357
359 358 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
360 359 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
361 360 adding changesets
362 361 adding manifests
363 362 adding file changes
364 363 added 2 changesets with 2 changes to 2 files
365 364 finished applying clone bundle
366 365 searching for changes
367 366 no changes found
368 367
369 368 Preferring bz2 type will download first entry of that type
370 369
371 370 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
372 371 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
373 372 adding changesets
374 373 adding manifests
375 374 adding file changes
376 375 added 2 changesets with 2 changes to 2 files
377 376 finished applying clone bundle
378 377 searching for changes
379 378 no changes found
380 379
381 380 Preferring multiple values of an option works
382 381
383 382 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
384 383 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
385 384 adding changesets
386 385 adding manifests
387 386 adding file changes
388 387 added 2 changesets with 2 changes to 2 files
389 388 finished applying clone bundle
390 389 searching for changes
391 390 no changes found
392 391
393 392 Sorting multiple values should get us back to original first entry
394 393
395 394 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
396 395 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
397 396 adding changesets
398 397 adding manifests
399 398 adding file changes
400 399 added 2 changesets with 2 changes to 2 files
401 400 finished applying clone bundle
402 401 searching for changes
403 402 no changes found
404 403
405 404 Preferring multiple attributes has correct order
406 405
407 406 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
408 407 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
409 408 adding changesets
410 409 adding manifests
411 410 adding file changes
412 411 added 2 changesets with 2 changes to 2 files
413 412 finished applying clone bundle
414 413 searching for changes
415 414 no changes found
416 415
417 416 Test where attribute is missing from some entries
418 417
419 418 $ cat > server/.hg/clonebundles.manifest << EOF
420 419 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
421 420 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
422 421 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
423 422 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
424 423 > EOF
425 424
426 425 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
427 426 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
428 427 adding changesets
429 428 adding manifests
430 429 adding file changes
431 430 added 2 changesets with 2 changes to 2 files
432 431 finished applying clone bundle
433 432 searching for changes
434 433 no changes found
@@ -1,876 +1,875
1 1 commit hooks can see env vars
2 2 (and post-transaction ones are run unlocked)
3 3
4 4 $ cat << EOF >> $HGRCPATH
5 5 > [experimental]
6 6 > # drop me once bundle2 is the default,
7 7 > # added to get test change early.
8 8 > bundle2-exp = True
9 9 > EOF
10 10
11 11 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
12 12 > def showargs(ui, repo, hooktype, **kwargs):
13 13 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
14 14 > EOF
15 15
16 16 $ hg init a
17 17 $ cd a
18 18 $ cat > .hg/hgrc <<EOF
19 19 > [hooks]
20 20 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
21 21 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
22 22 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
23 23 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
24 24 > pretxncommit.tip = hg -q tip
25 25 > pre-identify = printenv.py pre-identify 1
26 26 > pre-cat = printenv.py pre-cat
27 27 > post-cat = printenv.py post-cat
28 28 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
29 29 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
30 30 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
31 31 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
32 32 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
33 33 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
34 34 > EOF
35 35 $ echo a > a
36 36 $ hg add a
37 37 $ hg commit -m a
38 38 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
39 39 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
40 40 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
41 41 0:cb9a9f314b8b
42 42 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
43 43 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
44 44 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
45 45 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
46 46
47 47 $ hg clone . ../b
48 48 updating to branch default
49 49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 50 $ cd ../b
51 51
52 52 changegroup hooks can see env vars
53 53
54 54 $ cat > .hg/hgrc <<EOF
55 55 > [hooks]
56 56 > prechangegroup = printenv.py prechangegroup
57 57 > changegroup = printenv.py changegroup
58 58 > incoming = printenv.py incoming
59 59 > EOF
60 60
61 61 pretxncommit and commit hooks can see both parents of merge
62 62
63 63 $ cd ../a
64 64 $ echo b >> a
65 65 $ hg commit -m a1 -d "1 0"
66 66 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
67 67 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
68 68 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
69 69 1:ab228980c14d
70 70 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
71 71 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
72 72 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
73 73 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
74 74 $ hg update -C 0
75 75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 76 $ echo b > b
77 77 $ hg add b
78 78 $ hg commit -m b -d '1 0'
79 79 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
80 80 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
81 81 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
82 82 2:ee9deb46ab31
83 83 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
84 84 created new head
85 85 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
86 86 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
87 87 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
88 88 $ hg merge 1
89 89 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 90 (branch merge, don't forget to commit)
91 91 $ hg commit -m merge -d '2 0'
92 92 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
93 93 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
94 94 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
95 95 3:07f3376c1e65
96 96 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
97 97 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
98 98 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
99 99 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
100 100
101 101 test generic hooks
102 102
103 103 $ hg id
104 104 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
105 105 abort: pre-identify hook exited with status 1
106 106 [255]
107 107 $ hg cat b
108 108 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
109 109 b
110 110 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
111 111
112 112 $ cd ../b
113 113 $ hg pull ../a
114 114 pulling from ../a
115 115 searching for changes
116 116 prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
117 117 adding changesets
118 118 adding manifests
119 119 adding file changes
120 120 added 3 changesets with 2 changes to 2 files
121 121 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
122 122 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
123 123 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
124 124 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
125 125 (run 'hg update' to get a working copy)
126 126
127 127 tag hooks can see env vars
128 128
129 129 $ cd ../a
130 130 $ cat >> .hg/hgrc <<EOF
131 131 > pretag = printenv.py pretag
132 132 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
133 133 > EOF
134 134 $ hg tag -d '3 0' a
135 135 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
136 136 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
137 137 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
138 138 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
139 139 4:539e4b31b6dc
140 140 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
141 141 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
142 142 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
143 143 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
144 144 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
145 145 $ hg tag -l la
146 146 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
147 147 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
148 148
149 149 pretag hook can forbid tagging
150 150
151 151 $ echo "pretag.forbid = printenv.py pretag.forbid 1" >> .hg/hgrc
152 152 $ hg tag -d '4 0' fa
153 153 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
154 154 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
155 155 abort: pretag.forbid hook exited with status 1
156 156 [255]
157 157 $ hg tag -l fla
158 158 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
159 159 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
160 160 abort: pretag.forbid hook exited with status 1
161 161 [255]
162 162
163 163 pretxncommit hook can see the changeset and can roll back the txn; the
164 164 changeset is no longer there afterwards
165 165
166 166 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
167 167 $ echo "pretxncommit.forbid1 = printenv.py pretxncommit.forbid 1" >> .hg/hgrc
168 168 $ echo z > z
169 169 $ hg add z
170 170 $ hg -q tip
171 171 4:539e4b31b6dc
172 172 $ hg commit -m 'fail' -d '4 0'
173 173 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
174 174 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
175 175 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
176 176 5:6f611f8018c1
177 177 5:6f611f8018c1
178 178 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
179 179 transaction abort!
180 180 txnabort python hook: txnid,txnname
181 181 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
182 182 rollback completed
183 183 abort: pretxncommit.forbid1 hook exited with status 1
184 184 [255]
185 185 $ hg -q tip
186 186 4:539e4b31b6dc
187 187
188 188 (Check that no 'changelog.i.a' file was left behind)
189 189
190 190 $ ls -1 .hg/store/
191 191 00changelog.i
192 192 00manifest.i
193 193 data
194 194 fncache
195 195 journal.phaseroots
196 196 phaseroots
197 197 undo
198 198 undo.backup.fncache
199 199 undo.backupfiles
200 200 undo.phaseroots
201 201
202 202
203 203 precommit hook can prevent commit
204 204
205 205 $ echo "precommit.forbid = printenv.py precommit.forbid 1" >> .hg/hgrc
206 206 $ hg commit -m 'fail' -d '4 0'
207 207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
209 209 abort: precommit.forbid hook exited with status 1
210 210 [255]
211 211 $ hg -q tip
212 212 4:539e4b31b6dc
213 213
214 214 preupdate hook can prevent update
215 215
216 216 $ echo "preupdate = printenv.py preupdate" >> .hg/hgrc
217 217 $ hg update 1
218 218 preupdate hook: HG_PARENT1=ab228980c14d
219 219 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
220 220
221 221 update hook
222 222
223 223 $ echo "update = printenv.py update" >> .hg/hgrc
224 224 $ hg update
225 225 preupdate hook: HG_PARENT1=539e4b31b6dc
226 226 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
227 227 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
228 228
229 229 pushkey hook
230 230
231 231 $ echo "pushkey = printenv.py pushkey" >> .hg/hgrc
232 232 $ cd ../b
233 233 $ hg bookmark -r null foo
234 234 $ hg push -B foo ../a
235 235 pushing to ../a
236 236 searching for changes
237 237 no changes found
238 238 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
239 239 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
240 240 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
241 241 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
242 242 exporting bookmark foo
243 243 [1]
244 244 $ cd ../a
245 245
246 246 listkeys hook
247 247
248 248 $ echo "listkeys = printenv.py listkeys" >> .hg/hgrc
249 249 $ hg bookmark -r null bar
250 250 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
251 251 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
252 252 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
253 253 $ cd ../b
254 254 $ hg pull -B bar ../a
255 255 pulling from ../a
256 256 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
257 257 no changes found
258 listkeys hook: HG_NAMESPACE=phase HG_VALUES={}
258 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
259 259 adding remote bookmark bar
260 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
261 260 $ cd ../a
262 261
263 262 test that prepushkey can prevent incoming keys
264 263
265 264 $ echo "prepushkey = printenv.py prepushkey.forbid 1" >> .hg/hgrc
266 265 $ cd ../b
267 266 $ hg bookmark -r null baz
268 267 $ hg push -B baz ../a
269 268 pushing to ../a
270 269 searching for changes
271 270 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
272 271 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
273 272 no changes found
274 273 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
275 274 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:* HG_URL=push (glob)
276 275 pushkey-abort: prepushkey hook exited with status 1
277 276 abort: exporting bookmark baz failed!
278 277 [255]
279 278 $ cd ../a
280 279
281 280 test that prelistkeys can prevent listing keys
282 281
283 282 $ echo "prelistkeys = printenv.py prelistkeys.forbid 1" >> .hg/hgrc
284 283 $ hg bookmark -r null quux
285 284 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
286 285 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
287 286 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
288 287 $ cd ../b
289 288 $ hg pull -B quux ../a
290 289 pulling from ../a
291 290 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
292 291 abort: prelistkeys hook exited with status 1
293 292 [255]
294 293 $ cd ../a
295 294 $ rm .hg/hgrc
296 295
297 296 prechangegroup hook can prevent incoming changes
298 297
299 298 $ cd ../b
300 299 $ hg -q tip
301 300 3:07f3376c1e65
302 301 $ cat > .hg/hgrc <<EOF
303 302 > [hooks]
304 303 > prechangegroup.forbid = printenv.py prechangegroup.forbid 1
305 304 > EOF
306 305 $ hg pull ../a
307 306 pulling from ../a
308 307 searching for changes
309 308 prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
310 309 abort: prechangegroup.forbid hook exited with status 1
311 310 [255]
312 311
313 312 pretxnchangegroup hook can see incoming changes and can roll back the txn;
314 313 the incoming changes are no longer there afterwards
315 314
316 315 $ cat > .hg/hgrc <<EOF
317 316 > [hooks]
318 317 > pretxnchangegroup.forbid0 = hg tip -q
319 318 > pretxnchangegroup.forbid1 = printenv.py pretxnchangegroup.forbid 1
320 319 > EOF
321 320 $ hg pull ../a
322 321 pulling from ../a
323 322 searching for changes
324 323 adding changesets
325 324 adding manifests
326 325 adding file changes
327 326 added 1 changesets with 1 changes to 1 files
328 327 4:539e4b31b6dc
329 328 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
330 329 transaction abort!
331 330 rollback completed
332 331 abort: pretxnchangegroup.forbid1 hook exited with status 1
333 332 [255]
334 333 $ hg -q tip
335 334 3:07f3376c1e65
336 335
337 336 outgoing hooks can see env vars
338 337
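For context, the shell hooks in these tests are wired to a small helper script; a rough,
hypothetical sketch of what such a helper does (the real printenv.py shipped with the test
suite may differ in its details) is:

    import os
    import sys

    # usage: printenv-like.py <hookname> [exitcode]
    name = sys.argv[1]
    exitcode = int(sys.argv[2]) if len(sys.argv) > 2 else 0
    # collect the HG_* variables Mercurial exports to hooks, in sorted order
    env = sorted((k, v) for k, v in os.environ.items() if k.startswith('HG_'))
    print('%s hook: %s' % (name, ' '.join('%s=%s' % kv for kv in env)))
    sys.exit(exitcode)
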
339 338 $ rm .hg/hgrc
340 339 $ cat > ../a/.hg/hgrc <<EOF
341 340 > [hooks]
342 341 > preoutgoing = printenv.py preoutgoing
343 342 > outgoing = printenv.py outgoing
344 343 > EOF
345 344 $ hg pull ../a
346 345 pulling from ../a
347 346 searching for changes
348 347 preoutgoing hook: HG_SOURCE=pull
349 348 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
350 349 adding changesets
351 350 adding manifests
352 351 adding file changes
353 352 added 1 changesets with 1 changes to 1 files
354 353 adding remote bookmark quux
355 354 (run 'hg update' to get a working copy)
356 355 $ hg rollback
357 356 repository tip rolled back to revision 3 (undo pull)
358 357
359 358 preoutgoing hook can prevent outgoing changes
360 359
361 360 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> ../a/.hg/hgrc
362 361 $ hg pull ../a
363 362 pulling from ../a
364 363 searching for changes
365 364 preoutgoing hook: HG_SOURCE=pull
366 365 preoutgoing.forbid hook: HG_SOURCE=pull
367 366 abort: preoutgoing.forbid hook exited with status 1
368 367 [255]
369 368
370 369 outgoing hooks work for local clones
371 370
372 371 $ cd ..
373 372 $ cat > a/.hg/hgrc <<EOF
374 373 > [hooks]
375 374 > preoutgoing = printenv.py preoutgoing
376 375 > outgoing = printenv.py outgoing
377 376 > EOF
378 377 $ hg clone a c
379 378 preoutgoing hook: HG_SOURCE=clone
380 379 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
381 380 updating to branch default
382 381 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
383 382 $ rm -rf c
384 383
385 384 preoutgoing hook can prevent outgoing changes for local clones
386 385
387 386 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> a/.hg/hgrc
388 387 $ hg clone a zzz
389 388 preoutgoing hook: HG_SOURCE=clone
390 389 preoutgoing.forbid hook: HG_SOURCE=clone
391 390 abort: preoutgoing.forbid hook exited with status 1
392 391 [255]
393 392
394 393 $ cd "$TESTTMP/b"
395 394
396 395 $ cat > hooktests.py <<EOF
397 396 > from mercurial import error
398 397 >
399 398 > uncallable = 0
400 399 >
401 400 > def printargs(args):
402 401 > args.pop('ui', None)
403 402 > args.pop('repo', None)
404 403 > a = list(args.items())
405 404 > a.sort()
406 405 > print 'hook args:'
407 406 > for k, v in a:
408 407 > print ' ', k, v
409 408 >
410 409 > def passhook(**args):
411 410 > printargs(args)
412 411 >
413 412 > def failhook(**args):
414 413 > printargs(args)
415 414 > return True
416 415 >
417 416 > class LocalException(Exception):
418 417 > pass
419 418 >
420 419 > def raisehook(**args):
421 420 > raise LocalException('exception from hook')
422 421 >
423 422 > def aborthook(**args):
424 423 > raise error.Abort('raise abort from hook')
425 424 >
426 425 > def brokenhook(**args):
427 426 > return 1 + {}
428 427 >
429 428 > def verbosehook(ui, **args):
430 429 > ui.note('verbose output from hook\n')
431 430 >
432 431 > def printtags(ui, repo, **args):
433 432 > print sorted(repo.tags())
434 433 >
435 434 > class container:
436 435 > unreachable = 1
437 436 > EOF
438 437
439 438 $ cat > syntaxerror.py << EOF
440 439 > (foo
441 440 > EOF
442 441
443 442 test python hooks
444 443
445 444 #if windows
446 445 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
447 446 #else
448 447 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
449 448 #endif
450 449 $ export PYTHONPATH
451 450
452 451 $ echo '[hooks]' > ../a/.hg/hgrc
453 452 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
454 453 $ hg pull ../a 2>&1 | grep 'raised an exception'
455 454 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
456 455
457 456 $ echo '[hooks]' > ../a/.hg/hgrc
458 457 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
459 458 $ hg pull ../a 2>&1 | grep 'raised an exception'
460 459 error: preoutgoing.raise hook raised an exception: exception from hook
461 460
462 461 $ echo '[hooks]' > ../a/.hg/hgrc
463 462 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
464 463 $ hg pull ../a
465 464 pulling from ../a
466 465 searching for changes
467 466 error: preoutgoing.abort hook failed: raise abort from hook
468 467 abort: raise abort from hook
469 468 [255]
470 469
471 470 $ echo '[hooks]' > ../a/.hg/hgrc
472 471 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
473 472 $ hg pull ../a
474 473 pulling from ../a
475 474 searching for changes
476 475 hook args:
477 476 hooktype preoutgoing
478 477 source pull
479 478 abort: preoutgoing.fail hook failed
480 479 [255]
481 480
482 481 $ echo '[hooks]' > ../a/.hg/hgrc
483 482 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
484 483 $ hg pull ../a
485 484 pulling from ../a
486 485 searching for changes
487 486 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
488 487 [255]
489 488
490 489 $ echo '[hooks]' > ../a/.hg/hgrc
491 490 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
492 491 $ hg pull ../a
493 492 pulling from ../a
494 493 searching for changes
495 494 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
496 495 [255]
497 496
498 497 $ echo '[hooks]' > ../a/.hg/hgrc
499 498 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
500 499 $ hg pull ../a
501 500 pulling from ../a
502 501 searching for changes
503 502 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
504 503 [255]
505 504
506 505 $ echo '[hooks]' > ../a/.hg/hgrc
507 506 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
508 507 $ hg pull ../a
509 508 pulling from ../a
510 509 searching for changes
511 510 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
512 511 (run with --traceback for stack trace)
513 512 [255]
514 513
515 514 $ echo '[hooks]' > ../a/.hg/hgrc
516 515 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
517 516 $ hg pull ../a
518 517 pulling from ../a
519 518 searching for changes
520 519 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
521 520 (run with --traceback for stack trace)
522 521 [255]
523 522
524 523 $ echo '[hooks]' > ../a/.hg/hgrc
525 524 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
526 525 $ hg pull ../a
527 526 pulling from ../a
528 527 searching for changes
529 528 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
530 529 (run with --traceback for stack trace)
531 530 [255]
532 531
533 532 The second egrep is to filter out lines like ' ^', which are slightly
534 533 different between Python 2.6 and Python 2.7.
535 534 $ hg pull ../a --traceback 2>&1 | egrep -v '^( +File| [_a-zA-Z*(])' | egrep -v '^( )+(\^)?$'
536 535 pulling from ../a
537 536 searching for changes
538 537 exception from first failed import attempt:
539 538 Traceback (most recent call last):
540 539 SyntaxError: * (glob)
541 540 exception from second failed import attempt:
542 541 Traceback (most recent call last):
543 542 ImportError: No module named hgext_syntaxerror
544 543 Traceback (most recent call last):
545 544 HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
546 545 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
547 546
548 547 $ echo '[hooks]' > ../a/.hg/hgrc
549 548 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
550 549 $ hg pull ../a
551 550 pulling from ../a
552 551 searching for changes
553 552 hook args:
554 553 hooktype preoutgoing
555 554 source pull
556 555 adding changesets
557 556 adding manifests
558 557 adding file changes
559 558 added 1 changesets with 1 changes to 1 files
560 559 adding remote bookmark quux
561 560 (run 'hg update' to get a working copy)
562 561
563 562 post- python hooks that fail to *run* don't cause an abort
564 563 $ rm ../a/.hg/hgrc
565 564 $ echo '[hooks]' > .hg/hgrc
566 565 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
567 566 $ hg pull ../a
568 567 pulling from ../a
569 568 searching for changes
570 569 no changes found
571 570 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
572 571 (run with --traceback for stack trace)
573 572
574 573 but post- python hooks that fail to *load* do
575 574 $ echo '[hooks]' > .hg/hgrc
576 575 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
577 576 $ hg pull ../a
578 577 pulling from ../a
579 578 searching for changes
580 579 no changes found
581 580 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
582 581 [255]
583 582
584 583 $ echo '[hooks]' > .hg/hgrc
585 584 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
586 585 $ hg pull ../a
587 586 pulling from ../a
588 587 searching for changes
589 588 no changes found
590 589 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
591 590 (run with --traceback for stack trace)
592 591 [255]
593 592
594 593 $ echo '[hooks]' > .hg/hgrc
595 594 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
596 595 $ hg pull ../a
597 596 pulling from ../a
598 597 searching for changes
599 598 no changes found
600 599 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
601 600 [255]
602 601
603 602 make sure --traceback works
604 603
605 604 $ echo '[hooks]' > .hg/hgrc
606 605 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
607 606
608 607 $ echo aa > a
609 608 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
610 609 Traceback (most recent call last):
611 610
612 611 $ cd ..
613 612 $ hg init c
614 613 $ cd c
615 614
616 615 $ cat > hookext.py <<EOF
617 616 > def autohook(**args):
618 617 > print "Automatically installed hook"
619 618 >
620 619 > def reposetup(ui, repo):
621 620 > repo.ui.setconfig("hooks", "commit.auto", autohook)
622 621 > EOF
623 622 $ echo '[extensions]' >> .hg/hgrc
624 623 $ echo 'hookext = hookext.py' >> .hg/hgrc
625 624
626 625 $ touch foo
627 626 $ hg add foo
628 627 $ hg ci -d '0 0' -m 'add foo'
629 628 Automatically installed hook
630 629 $ echo >> foo
631 630 $ hg ci --debug -d '0 0' -m 'change foo'
632 631 committing files:
633 632 foo
634 633 committing manifest
635 634 committing changelog
636 635 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
637 636 calling hook commit.auto: hgext_hookext.autohook
638 637 Automatically installed hook
639 638
640 639 $ hg showconfig hooks
641 640 hooks.commit.auto=<function autohook at *> (glob)
642 641
643 642 test python hook configured with python:[file]:[hook] syntax
644 643
645 644 $ cd ..
646 645 $ mkdir d
647 646 $ cd d
648 647 $ hg init repo
649 648 $ mkdir hooks
650 649
651 650 $ cd hooks
652 651 $ cat > testhooks.py <<EOF
653 652 > def testhook(**args):
654 653 > print 'hook works'
655 654 > EOF
656 655 $ echo '[hooks]' > ../repo/.hg/hgrc
657 656 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
658 657
659 658 $ cd ../repo
660 659 $ hg commit -d '0 0'
661 660 hook works
662 661 nothing changed
663 662 [1]
664 663
665 664 $ echo '[hooks]' > .hg/hgrc
666 665 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
667 666 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
668 667
669 668 $ hg up null
670 669 loading update.ne hook failed:
671 670 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
672 671 [255]
673 672
674 673 $ hg id
675 674 loading pre-identify.npmd hook failed:
676 675 abort: No module named repo!
677 676 [255]
678 677
679 678 $ cd ../../b
680 679
681 680 make sure --traceback works on hook import failure
682 681
683 682 $ cat > importfail.py <<EOF
684 683 > import somebogusmodule
685 684 > # dereference something in the module to force demandimport to load it
686 685 > somebogusmodule.whatever
687 686 > EOF
688 687
689 688 $ echo '[hooks]' > .hg/hgrc
690 689 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
691 690
692 691 $ echo a >> a
693 692 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
694 693 exception from first failed import attempt:
695 694 Traceback (most recent call last):
696 695 ImportError: No module named somebogusmodule
697 696 exception from second failed import attempt:
698 697 Traceback (most recent call last):
699 698 ImportError: No module named hgext_importfail
700 699 Traceback (most recent call last):
701 700 HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
702 701 abort: precommit.importfail hook is invalid: import of "importfail" failed
703 702
704 703 Issue1827: Hooks Update & Commit not completely post operation
705 704
706 705 commit and update hooks should run after command completion. The largefiles
707 706 usage demonstrates a recursive wlock, showing that the hook doesn't run until
708 707 the final release (and dirstate flush).
709 708
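As an aside, the behaviour being exercised can be pictured with a minimal sketch (a toy
model, not Mercurial's wlock implementation): callbacks queued while a re-entrant lock is
held only fire once the outermost acquisition is released.

    class recursivelock(object):
        """Toy re-entrant lock that defers callbacks to the final release."""
        def __init__(self):
            self.depth = 0
            self.callbacks = []
        def acquire(self):
            self.depth += 1
        def postrelease(self, cb):
            # queue work (e.g. a hook) until the lock is fully released
            self.callbacks.append(cb)
        def release(self):
            self.depth -= 1
            if self.depth == 0:
                for cb in self.callbacks:
                    cb()
                self.callbacks = []

    def hook():
        print('hook runs only now')

    lk = recursivelock()
    lk.acquire()            # outer command takes the lock
    lk.acquire()            # nested operation re-acquires it
    lk.postrelease(hook)
    lk.release()            # inner release: nothing runs yet
    lk.release()            # outermost release: the queued hook finally runs
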
710 709 $ echo '[hooks]' > .hg/hgrc
711 710 $ echo 'commit = hg id' >> .hg/hgrc
712 711 $ echo 'update = hg id' >> .hg/hgrc
713 712 $ echo bb > a
714 713 $ hg ci -ma
715 714 223eafe2750c tip
716 715 $ hg up 0 --config extensions.largefiles=
717 716 cb9a9f314b8b
718 717 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
719 718
720 719 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
721 720 that is passed to pre/post hooks
722 721
723 722 $ echo '[hooks]' > .hg/hgrc
724 723 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
725 724 $ hg id
726 725 cb9a9f314b8b
727 726 $ hg id --verbose
728 727 calling hook pre-identify: hooktests.verbosehook
729 728 verbose output from hook
730 729 cb9a9f314b8b
731 730
732 731 Ensure hooks can be prioritized
733 732
734 733 $ echo '[hooks]' > .hg/hgrc
735 734 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
736 735 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
737 736 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
738 737 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
739 738 $ hg id --verbose
740 739 calling hook pre-identify.b: hooktests.verbosehook
741 740 verbose output from hook
742 741 calling hook pre-identify.a: hooktests.verbosehook
743 742 verbose output from hook
744 743 calling hook pre-identify.c: hooktests.verbosehook
745 744 verbose output from hook
746 745 cb9a9f314b8b
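
The ordering above, b before a and c, follows from the priority setting; a rough sketch of
that dispatch order (the default priority of 0 and the name-based tie-break are assumptions
for illustration, not a statement of the exact rules):

    def hookorder(names, priorities):
        # higher priority first; ties broken by name here (an assumption)
        return sorted(names, key=lambda n: (-priorities.get(n, 0), n))

    names = ['pre-identify.a', 'pre-identify.b', 'pre-identify.c']
    print(hookorder(names, {'pre-identify.b': 1}))
    # ['pre-identify.b', 'pre-identify.a', 'pre-identify.c']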
747 746
748 747 new tags must be visible in pretxncommit (issue3210)
749 748
750 749 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
751 750 $ hg tag -f foo
752 751 ['a', 'foo', 'tip']
753 752
754 753 post-init hooks must not crash (issue4983)
755 754 This also creates the `to` repo for the next test block.
756 755
757 756 $ cd ..
758 757 $ cat << EOF >> hgrc-with-post-init-hook
759 758 > [hooks]
760 759 > post-init = printenv.py post-init
761 760 > EOF
762 761 $ HGRCPATH=hgrc-with-post-init-hook hg init to
763 762 post-init hook: HG_ARGS=init to HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0
764 763
765 764 new commits must be visible in pretxnchangegroup (issue3428)
766 765
767 766 $ echo '[hooks]' >> to/.hg/hgrc
768 767 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
769 768 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
770 769 $ echo a >> to/a
771 770 $ hg --cwd to ci -Ama
772 771 adding a
773 772 $ hg clone to from
774 773 updating to branch default
775 774 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
776 775 $ echo aa >> from/a
777 776 $ hg --cwd from ci -mb
778 777 $ hg --cwd from push
779 778 pushing to $TESTTMP/to (glob)
780 779 searching for changes
781 780 changeset: 0:cb9a9f314b8b
782 781 tag: tip
783 782 user: test
784 783 date: Thu Jan 01 00:00:00 1970 +0000
785 784 summary: a
786 785
787 786 adding changesets
788 787 adding manifests
789 788 adding file changes
790 789 added 1 changesets with 1 changes to 1 files
791 790 changeset: 1:9836a07b9b9d
792 791 tag: tip
793 792 user: test
794 793 date: Thu Jan 01 00:00:00 1970 +0000
795 794 summary: b
796 795
797 796
798 797 pretxnclose hook failure should abort the transaction
799 798
800 799 $ hg init txnfailure
801 800 $ cd txnfailure
802 801 $ touch a && hg commit -Aqm a
803 802 $ cat >> .hg/hgrc <<EOF
804 803 > [hooks]
805 804 > pretxnclose.error = exit 1
806 805 > EOF
807 806 $ hg strip -r 0 --config extensions.strip=
808 807 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
809 808 saved backup bundle to * (glob)
810 809 transaction abort!
811 810 rollback completed
812 811 strip failed, full bundle stored in * (glob)
813 812 abort: pretxnclose.error hook exited with status 1
814 813 [255]
815 814 $ hg recover
816 815 no interrupted transaction available
817 816 [1]
818 817 $ cd ..
819 818
820 819 Hooks from untrusted hgrc are reported as failures
821 820 ==================================================
822 821
823 822 $ cat << EOF > $TESTTMP/untrusted.py
824 823 > from mercurial import scmutil, util
825 824 > def uisetup(ui):
826 825 > class untrustedui(ui.__class__):
827 826 > def _trusted(self, fp, f):
828 827 > if util.normpath(fp.name).endswith('untrusted/.hg/hgrc'):
829 828 > return False
830 829 > return super(untrustedui, self)._trusted(fp, f)
831 830 > ui.__class__ = untrustedui
832 831 > EOF
833 832 $ cat << EOF >> $HGRCPATH
834 833 > [extensions]
835 834 > untrusted=$TESTTMP/untrusted.py
836 835 > EOF
837 836 $ hg init untrusted
838 837 $ cd untrusted
839 838
840 839 Non-blocking hook
841 840 -----------------
842 841
843 842 $ cat << EOF >> .hg/hgrc
844 843 > [hooks]
845 844 > txnclose.testing=echo txnclose hook called
846 845 > EOF
847 846 $ touch a && hg commit -Aqm a
848 847 warning: untrusted hook txnclose not executed
849 848 $ hg log
850 849 changeset: 0:3903775176ed
851 850 tag: tip
852 851 user: test
853 852 date: Thu Jan 01 00:00:00 1970 +0000
854 853 summary: a
855 854
856 855
857 856 Blocking hook
858 857 -------------
859 858
860 859 $ cat << EOF >> .hg/hgrc
861 860 > [hooks]
862 861 > pretxnclose.testing=echo pre-txnclose hook called
863 862 > EOF
864 863 $ touch b && hg commit -Aqm a
865 864 transaction abort!
866 865 rollback completed
867 866 abort: untrusted hook pretxnclose not executed
868 867 (see 'hg help config.trusted')
869 868 [255]
870 869 $ hg log
871 870 changeset: 0:3903775176ed
872 871 tag: tip
873 872 user: test
874 873 date: Thu Jan 01 00:00:00 1970 +0000
875 874 summary: a
876 875
@@ -1,130 +1,125
1 1 #require serve
2 2 $ cat << EOF >> $HGRCPATH
3 3 > [experimental]
4 4 > # drop me once bundle2 is the default,
5 5 > # added to get test change early.
6 6 > bundle2-exp = True
7 7 > EOF
8 8
9 9 $ hg init a
10 10 $ cd a
11 11 $ echo a > a
12 12 $ hg ci -Ama -d '1123456789 0'
13 13 adding a
14 14 $ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid
15 15 $ cat hg.pid >> $DAEMON_PIDS
16 16 $ cd ..
17 17 $ tinyproxy.py $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
18 18 $ while [ ! -f proxy.pid ]; do sleep 0; done
19 19 $ cat proxy.pid >> $DAEMON_PIDS
20 20
21 21 url for proxy, stream
22 22
23 23 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b
24 24 streaming all changes
25 25 3 files to transfer, 303 bytes of data
26 26 transferred * bytes in * seconds (*/sec) (glob)
27 27 searching for changes
28 28 no changes found
29 29 updating to branch default
30 30 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 31 $ cd b
32 32 $ hg verify
33 33 checking changesets
34 34 checking manifests
35 35 crosschecking files in changesets and manifests
36 36 checking files
37 37 1 files, 1 changesets, 1 total revisions
38 38 $ cd ..
39 39
40 40 url for proxy, pull
41 41
42 42 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
43 43 requesting all changes
44 44 adding changesets
45 45 adding manifests
46 46 adding file changes
47 47 added 1 changesets with 1 changes to 1 files
48 48 updating to branch default
49 49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 50 $ cd b-pull
51 51 $ hg verify
52 52 checking changesets
53 53 checking manifests
54 54 crosschecking files in changesets and manifests
55 55 checking files
56 56 1 files, 1 changesets, 1 total revisions
57 57 $ cd ..
58 58
59 59 host:port for proxy
60 60
61 61 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
62 62 requesting all changes
63 63 adding changesets
64 64 adding manifests
65 65 adding file changes
66 66 added 1 changesets with 1 changes to 1 files
67 67 updating to branch default
68 68 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
69 69
70 70 proxy url with user name and password
71 71
72 72 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
73 73 requesting all changes
74 74 adding changesets
75 75 adding manifests
76 76 adding file changes
77 77 added 1 changesets with 1 changes to 1 files
78 78 updating to branch default
79 79 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
80 80
81 81 url with user name and password
82 82
83 83 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
84 84 requesting all changes
85 85 adding changesets
86 86 adding manifests
87 87 adding file changes
88 88 added 1 changesets with 1 changes to 1 files
89 89 updating to branch default
90 90 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 91
92 92 bad host:port for proxy
93 93
94 94 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
95 95 abort: error: Connection refused
96 96 [255]
97 97
98 98 do not use the proxy if it is in the no list
99 99
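Conceptually, `http_proxy.no` means "connect directly when the target host matches an
entry"; a hedged sketch of that kind of check (an illustration, not Mercurial's actual
matching code):

    def bypassproxy(host, no_list):
        # no_list: host names or suffixes, e.g. ['localhost', '.example.com']
        host = host.lower()
        return any(host == entry.lower() or host.endswith(entry.lower())
                   for entry in no_list)

    print(bypassproxy('localhost', ['localhost']))    # True: skip the proxy
    print(bypassproxy('example.org', ['localhost']))  # False: go through the proxy
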
100 100 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
101 101 requesting all changes
102 102 adding changesets
103 103 adding manifests
104 104 adding file changes
105 105 added 1 changesets with 1 changes to 1 files
106 106 updating to branch default
107 107 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
108 108 $ cat proxy.log
109 109 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
110 110 * - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - (glob)
111 111 * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob)
112 112 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
113 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
113 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks (glob)
115 114 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
116 115 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob)
118 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
116 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks (glob)
119 117 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
120 118 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
121 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob)
122 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
119 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks (glob)
123 120 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
124 121 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
125 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob)
126 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
122 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks (glob)
127 123 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
128 124 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
129 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob)
130 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
125 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phases%2Cbookmarks (glob)
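
The x-hgarg-1 values in the proxy log above are percent-encoded; the listkeys argument,
for instance, can be decoded with the standard library (shown here only as a reading aid):

    try:
        from urllib.parse import quote, unquote  # Python 3
    except ImportError:
        from urllib import quote, unquote        # Python 2

    print(unquote('phases%2Cbookmarks'))        # phases,bookmarks
    print(quote('phases,bookmarks', safe=''))   # phases%2Cbookmarks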
@@ -1,323 +1,321
1 1 #require serve
2 2
3 3 $ hg init test
4 4 $ cd test
5 5 $ echo foo>foo
6 6 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
7 7 $ echo foo>foo.d/foo
8 8 $ echo bar>foo.d/bAr.hg.d/BaR
9 9 $ echo bar>foo.d/baR.d.hg/bAR
10 10 $ hg commit -A -m 1
11 11 adding foo
12 12 adding foo.d/bAr.hg.d/BaR
13 13 adding foo.d/baR.d.hg/bAR
14 14 adding foo.d/foo
15 15 $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log
16 16 $ hg serve --config server.uncompressed=False -p $HGPORT1 -d --pid-file=../hg2.pid
17 17
18 18 Test server address cannot be reused
19 19
20 20 #if windows
21 21 $ hg serve -p $HGPORT1 2>&1
22 22 abort: cannot start server at ':$HGPORT1': * (glob)
23 23 [255]
24 24 #else
25 25 $ hg serve -p $HGPORT1 2>&1
26 26 abort: cannot start server at ':$HGPORT1': Address already in use
27 27 [255]
28 28 #endif
29 29 $ cd ..
30 30 $ cat hg1.pid hg2.pid >> $DAEMON_PIDS
31 31
32 32 clone via stream
33 33
34 34 $ hg clone --uncompressed http://localhost:$HGPORT/ copy 2>&1
35 35 streaming all changes
36 36 6 files to transfer, 606 bytes of data
37 37 transferred * bytes in * seconds (*/sec) (glob)
38 38 searching for changes
39 39 no changes found
40 40 updating to branch default
41 41 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 42 $ hg verify -R copy
43 43 checking changesets
44 44 checking manifests
45 45 crosschecking files in changesets and manifests
46 46 checking files
47 47 4 files, 1 changesets, 4 total revisions
48 48
49 49 try to clone via stream, should use pull instead
50 50
51 51 $ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
52 52 requesting all changes
53 53 adding changesets
54 54 adding manifests
55 55 adding file changes
56 56 added 1 changesets with 4 changes to 4 files
57 57 updating to branch default
58 58 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 59
60 60 clone via pull
61 61
62 62 $ hg clone http://localhost:$HGPORT1/ copy-pull
63 63 requesting all changes
64 64 adding changesets
65 65 adding manifests
66 66 adding file changes
67 67 added 1 changesets with 4 changes to 4 files
68 68 updating to branch default
69 69 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
70 70 $ hg verify -R copy-pull
71 71 checking changesets
72 72 checking manifests
73 73 crosschecking files in changesets and manifests
74 74 checking files
75 75 4 files, 1 changesets, 4 total revisions
76 76 $ cd test
77 77 $ echo bar > bar
78 78 $ hg commit -A -d '1 0' -m 2
79 79 adding bar
80 80 $ cd ..
81 81
82 82 clone over http with --update
83 83
84 84 $ hg clone http://localhost:$HGPORT1/ updated --update 0
85 85 requesting all changes
86 86 adding changesets
87 87 adding manifests
88 88 adding file changes
89 89 added 2 changesets with 5 changes to 5 files
90 90 updating to branch default
91 91 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
92 92 $ hg log -r . -R updated
93 93 changeset: 0:8b6053c928fe
94 94 user: test
95 95 date: Thu Jan 01 00:00:00 1970 +0000
96 96 summary: 1
97 97
98 98 $ rm -rf updated
99 99
100 100 incoming via HTTP
101 101
102 102 $ hg clone http://localhost:$HGPORT1/ --rev 0 partial
103 103 adding changesets
104 104 adding manifests
105 105 adding file changes
106 106 added 1 changesets with 4 changes to 4 files
107 107 updating to branch default
108 108 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
109 109 $ cd partial
110 110 $ touch LOCAL
111 111 $ hg ci -qAm LOCAL
112 112 $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n'
113 113 comparing with http://localhost:$HGPORT1/
114 114 searching for changes
115 115 2
116 116 $ cd ..
117 117
118 118 pull
119 119
120 120 $ cd copy-pull
121 121 $ echo '[hooks]' >> .hg/hgrc
122 122 $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc
123 123 $ hg pull
124 124 pulling from http://localhost:$HGPORT1/
125 125 searching for changes
126 126 adding changesets
127 127 adding manifests
128 128 adding file changes
129 129 added 1 changesets with 1 changes to 1 files
130 130 changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=http://localhost:$HGPORT1/ (glob)
131 131 (run 'hg update' to get a working copy)
132 132 $ cd ..
133 133
134 134 clone from invalid URL
135 135
136 136 $ hg clone http://localhost:$HGPORT/bad
137 137 abort: HTTP Error 404: Not Found
138 138 [255]
139 139
140 140 test http authentication
141 141 + use the same server to test server side streaming preference
142 142
143 143 $ cd test
144 144 $ cat << EOT > userpass.py
145 145 > import base64
146 146 > from mercurial.hgweb import common
147 147 > def perform_authentication(hgweb, req, op):
148 148 > auth = req.env.get('HTTP_AUTHORIZATION')
149 149 > if not auth:
150 150 > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, 'who',
151 151 > [('WWW-Authenticate', 'Basic Realm="mercurial"')])
152 152 > if base64.b64decode(auth.split()[1]).split(':', 1) != ['user', 'pass']:
153 153 > raise common.ErrorResponse(common.HTTP_FORBIDDEN, 'no')
154 154 > def extsetup():
155 155 > common.permhooks.insert(0, perform_authentication)
156 156 > EOT
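
For reference, the HTTP_AUTHORIZATION value the hook above decodes is a standard
basic-auth token; it can be computed like this (illustration only, not part of the test):

    import base64

    token = base64.b64encode(b'user:pass').decode('ascii')
    print('Authorization: Basic ' + token)   # Authorization: Basic dXNlcjpwYXNz
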
157 157 $ hg serve --config extensions.x=userpass.py -p $HGPORT2 -d --pid-file=pid \
158 158 > --config server.preferuncompressed=True \
159 159 > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
160 160 $ cat pid >> $DAEMON_PIDS
161 161
162 162 $ cat << EOF > get_pass.py
163 163 > import getpass
164 164 > def newgetpass(arg):
165 165 > return "pass"
166 166 > getpass.getpass = newgetpass
167 167 > EOF
168 168
169 169 $ hg id http://localhost:$HGPORT2/
170 170 abort: http authorization required for http://localhost:$HGPORT2/
171 171 [255]
172 172 $ hg id http://localhost:$HGPORT2/
173 173 abort: http authorization required for http://localhost:$HGPORT2/
174 174 [255]
175 175 $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/
176 176 http authorization required for http://localhost:$HGPORT2/
177 177 realm: mercurial
178 178 user: user
179 179 password: 5fed3813f7f5
180 180 $ hg id http://user:pass@localhost:$HGPORT2/
181 181 5fed3813f7f5
182 182 $ echo '[auth]' >> .hg/hgrc
183 183 $ echo 'l.schemes=http' >> .hg/hgrc
184 184 $ echo 'l.prefix=lo' >> .hg/hgrc
185 185 $ echo 'l.username=user' >> .hg/hgrc
186 186 $ echo 'l.password=pass' >> .hg/hgrc
187 187 $ hg id http://localhost:$HGPORT2/
188 188 5fed3813f7f5
189 189 $ hg id http://localhost:$HGPORT2/
190 190 5fed3813f7f5
191 191 $ hg id http://user@localhost:$HGPORT2/
192 192 5fed3813f7f5
193 193 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
194 194 streaming all changes
195 195 7 files to transfer, 916 bytes of data
196 196 transferred * bytes in * seconds (*/sec) (glob)
197 197 searching for changes
198 198 no changes found
199 199 updating to branch default
200 200 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
201 201 --pull should override server's preferuncompressed
202 202 $ hg clone --pull http://user:pass@localhost:$HGPORT2/ dest-pull 2>&1
203 203 requesting all changes
204 204 adding changesets
205 205 adding manifests
206 206 adding file changes
207 207 added 2 changesets with 5 changes to 5 files
208 208 updating to branch default
209 209 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
210 210
211 211 $ hg id http://user2@localhost:$HGPORT2/
212 212 abort: http authorization required for http://localhost:$HGPORT2/
213 213 [255]
214 214 $ hg id http://user:pass2@localhost:$HGPORT2/
215 215 abort: HTTP Error 403: no
216 216 [255]
217 217
218 218 $ hg -R dest tag -r tip top
219 219 $ hg -R dest push http://user:pass@localhost:$HGPORT2/
220 220 pushing to http://user:***@localhost:$HGPORT2/
221 221 searching for changes
222 222 remote: adding changesets
223 223 remote: adding manifests
224 224 remote: adding file changes
225 225 remote: added 1 changesets with 1 changes to 1 files
226 226 $ hg rollback -q
227 227
228 228 $ cut -c38- ../access.log
229 229 "GET /?cmd=capabilities HTTP/1.1" 200 -
230 230 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
231 231 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
232 232 "GET /?cmd=capabilities HTTP/1.1" 200 -
233 233 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
234 234 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
235 235 "GET /?cmd=capabilities HTTP/1.1" 200 -
236 236 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
237 237 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
238 238 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
239 239 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
240 240 "GET /?cmd=capabilities HTTP/1.1" 200 -
241 241 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
242 242 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
243 243 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
244 244 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
245 245 "GET /?cmd=capabilities HTTP/1.1" 200 -
246 246 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
247 247 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
248 248 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
249 249 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
250 250 "GET /?cmd=capabilities HTTP/1.1" 200 -
251 251 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
252 252 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
253 253 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
254 254 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
255 255 "GET /?cmd=capabilities HTTP/1.1" 200 -
256 256 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
257 257 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
258 258 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces
259 259 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
260 260 "GET /?cmd=capabilities HTTP/1.1" 200 -
261 261 "GET /?cmd=branchmap HTTP/1.1" 200 -
262 262 "GET /?cmd=stream_out HTTP/1.1" 401 -
263 263 "GET /?cmd=stream_out HTTP/1.1" 200 -
264 264 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d
265 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phase%2Cbookmarks
266 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
265 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phases%2Cbookmarks
267 266 "GET /?cmd=capabilities HTTP/1.1" 200 -
268 267 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D
269 "GET /?cmd=getbundle HTTP/1.1" 401 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phase%2Cbookmarks
270 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phase%2Cbookmarks
271 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
268 "GET /?cmd=getbundle HTTP/1.1" 401 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phases%2Cbookmarks
269 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phases%2Cbookmarks
272 270 "GET /?cmd=capabilities HTTP/1.1" 200 -
273 271 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
274 272 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
275 273 "GET /?cmd=capabilities HTTP/1.1" 200 -
276 274 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
277 275 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
278 276 "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces
279 277 "GET /?cmd=capabilities HTTP/1.1" 200 -
280 278 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872
281 279 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases
282 280 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
283 281 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
284 282 "GET /?cmd=branchmap HTTP/1.1" 200 -
285 283 "GET /?cmd=branchmap HTTP/1.1" 200 -
286 284 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
287 285 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365* (glob)
288 286 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
289 287
290 288 $ cd ..
291 289
292 290 clone of serve with repo in root and unserved subrepo (issue2970)
293 291
294 292 $ hg --cwd test init sub
295 293 $ echo empty > test/sub/empty
296 294 $ hg --cwd test/sub add empty
297 295 $ hg --cwd test/sub commit -qm 'add empty'
298 296 $ hg --cwd test/sub tag -r 0 something
299 297 $ echo sub = sub > test/.hgsub
300 298 $ hg --cwd test add .hgsub
301 299 $ hg --cwd test commit -qm 'add subrepo'
302 300 $ hg clone http://localhost:$HGPORT noslash-clone
303 301 requesting all changes
304 302 adding changesets
305 303 adding manifests
306 304 adding file changes
307 305 added 3 changesets with 7 changes to 7 files
308 306 updating to branch default
309 307 abort: HTTP Error 404: Not Found
310 308 [255]
311 309 $ hg clone http://localhost:$HGPORT/ slash-clone
312 310 requesting all changes
313 311 adding changesets
314 312 adding manifests
315 313 adding file changes
316 314 added 3 changesets with 7 changes to 7 files
317 315 updating to branch default
318 316 abort: HTTP Error 404: Not Found
319 317 [255]
320 318
321 319 check error log
322 320
323 321 $ cat error.log
@@ -1,154 +1,154
1 1 Test changesets filtering during exchanges (some tests are still in
2 2 test-obsolete.t)
3 3
4 4 $ cat >> $HGRCPATH << EOF
5 5 > [experimental]
6 6 > evolution=createmarkers
7 7 > EOF
8 8
9 9 Push does not corrupt remote
10 10 ----------------------------
11 11
12 12 Create a DAG where a changeset reuses a revision from a file first used in an
13 13 extinct changeset.
14 14
15 15 $ hg init local
16 16 $ cd local
17 17 $ echo 'base' > base
18 18 $ hg commit -Am base
19 19 adding base
20 20 $ echo 'A' > A
21 21 $ hg commit -Am A
22 22 adding A
23 23 $ hg up 0
24 24 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
25 25 $ hg revert -ar 1
26 26 adding A
27 27 $ hg commit -Am "A'"
28 28 created new head
29 29 $ hg log -G --template='{desc} {node}'
30 30 @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
31 31 |
32 32 | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a
33 33 |/
34 34 o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
35 35
36 36 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
37 37
38 38 Push it. The bundle should not refer to the extinct changeset.
39 39
40 40 $ hg init ../other
41 41 $ hg push ../other
42 42 pushing to ../other
43 43 searching for changes
44 44 adding changesets
45 45 adding manifests
46 46 adding file changes
47 47 added 2 changesets with 2 changes to 2 files
48 48 $ hg -R ../other verify
49 49 checking changesets
50 50 checking manifests
51 51 crosschecking files in changesets and manifests
52 52 checking files
53 53 2 files, 2 changesets, 2 total revisions
54 54
55 55 Adding a changeset going extinct locally
56 56 ------------------------------------------
57 57
58 58 Pull a changeset that will immediately go extinct (because you already have a
59 59 marker that obsoletes it)
60 60 (test resolution of issue3788)
61 61
62 62 $ hg phase --draft --force f89bcc95eba5
63 63 $ hg phase -R ../other --draft --force f89bcc95eba5
64 64 $ hg commit --amend -m "A''"
65 65 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
66 66 $ hg pull ../other
67 67 pulling from ../other
68 68 searching for changes
69 69 adding changesets
70 70 adding manifests
71 71 adding file changes
72 72 added 1 changesets with 0 changes to 1 files (+1 heads)
73 73 (run 'hg heads' to see heads, 'hg merge' to merge)
74 74
75 75 check that bundle is not affected
76 76
77 77 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg
78 78 1 changesets found
79 79 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
80 80 $ hg unbundle ../f89bcc95eba5.hg
81 81 adding changesets
82 82 adding manifests
83 83 adding file changes
84 84 added 1 changesets with 0 changes to 1 files (+1 heads)
85 85 (run 'hg heads' to see heads)
86 86 $ cd ..
87 87
88 88 pull does not fetch excessive changesets when common node is hidden (issue4982)
89 89 -------------------------------------------------------------------------------
90 90
91 91 initial repo with server and client matching
92 92
93 93 $ hg init pull-hidden-common
94 94 $ cd pull-hidden-common
95 95 $ touch foo
96 96 $ hg -q commit -A -m initial
97 97 $ echo 1 > foo
98 98 $ hg commit -m 1
99 99 $ echo 2a > foo
100 100 $ hg commit -m 2a
101 101 $ cd ..
102 102 $ hg clone --pull pull-hidden-common pull-hidden-common-client
103 103 requesting all changes
104 104 adding changesets
105 105 adding manifests
106 106 adding file changes
107 107 added 3 changesets with 3 changes to 1 files
108 108 updating to branch default
109 109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110 110
111 111 server obsoletes the old head
112 112
113 113 $ cd pull-hidden-common
114 114 $ hg -q up -r 1
115 115 $ echo 2b > foo
116 116 $ hg -q commit -m 2b
117 117 $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
118 118 $ cd ..
119 119
120 120 client only pulls down 1 changeset
121 121
122 122 $ cd pull-hidden-common-client
123 123 $ hg pull --debug
124 124 pulling from $TESTTMP/pull-hidden-common (glob)
125 125 query 1; heads
126 126 searching for changes
127 127 taking quick initial sample
128 128 query 2; still undecided: 2, sample size is: 2
129 129 2 total queries
130 130 1 changesets found
131 131 list of changesets:
132 132 bec0734cd68e84477ba7fc1d13e6cff53ab70129
133 listing keys for "phase"
133 listing keys for "phases"
134 134 listing keys for "bookmarks"
135 135 bundle2-output-bundle: "HG20", 3 parts total
136 136 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
137 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
137 bundle2-output-part: "listkeys" (params: 1 mandatory) 58 bytes payload
138 138 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
139 139 bundle2-input-bundle: with-transaction
140 140 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
141 141 adding changesets
142 142 add changeset bec0734cd68e
143 143 adding manifests
144 144 adding file changes
145 145 adding foo revisions
146 146 added 1 changesets with 1 changes to 1 files (+1 heads)
147 147 bundle2-input-part: total payload size 474
148 148 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
149 bundle2-input-part: total payload size 58
149 150 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
150 151 bundle2-input-bundle: 2 parts total
151 152 checking for updated bookmarks
152 listing keys for "phases"
153 153 updating the branch cache
154 154 (run 'hg heads' to see heads, 'hg merge' to merge)
@@ -1,539 +1,537
1 1
2 2 This test tries to exercise the ssh functionality with a dummy script
3 3
4 4 $ cat <<EOF >> $HGRCPATH
5 5 > [format]
6 6 > usegeneraldelta=yes
7 7 > EOF
8 8
9 9 creating 'remote' repo
10 10
11 11 $ hg init remote
12 12 $ cd remote
13 13 $ echo this > foo
14 14 $ echo this > fooO
15 15 $ hg ci -A -m "init" foo fooO
16 16
17 17 insert a closed branch (issue4428)
18 18
19 19 $ hg up null
20 20 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
21 21 $ hg branch closed
22 22 marked working directory as branch closed
23 23 (branches are permanent and global, did you want a bookmark?)
24 24 $ hg ci -mc0
25 25 $ hg ci --close-branch -mc1
26 26 $ hg up -q default
27 27
28 28 configure for serving
29 29
30 30 $ cat <<EOF > .hg/hgrc
31 31 > [server]
32 32 > uncompressed = True
33 33 >
34 34 > [hooks]
35 35 > changegroup = printenv.py changegroup-in-remote 0 ../dummylog
36 36 > EOF
37 37 $ cd ..
38 38
39 39 repo not found error
40 40
41 41 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
42 42 remote: abort: repository nonexistent not found!
43 43 abort: no suitable response from remote hg!
44 44 [255]
45 45
46 46 non-existent absolute path
47 47
48 48 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
49 49 remote: abort: repository $TESTTMP/nonexistent not found!
50 50 abort: no suitable response from remote hg!
51 51 [255]
52 52
53 53 clone remote via stream
54 54
55 55 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
56 56 streaming all changes
57 57 4 files to transfer, 615 bytes of data
58 58 transferred 615 bytes in * seconds (*) (glob)
59 59 searching for changes
60 60 no changes found
61 61 updating to branch default
62 62 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 63 $ cd local-stream
64 64 $ hg verify
65 65 checking changesets
66 66 checking manifests
67 67 crosschecking files in changesets and manifests
68 68 checking files
69 69 2 files, 3 changesets, 2 total revisions
70 70 $ hg branches
71 71 default 0:1160648e36ce
72 72 $ cd ..
73 73
74 74 clone bookmarks via stream
75 75
76 76 $ hg -R local-stream book mybook
77 77 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
78 78 streaming all changes
79 79 4 files to transfer, 615 bytes of data
80 80 transferred 615 bytes in * seconds (*) (glob)
81 81 searching for changes
82 82 no changes found
83 83 updating to branch default
84 84 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
85 85 $ cd stream2
86 86 $ hg book
87 87 mybook 0:1160648e36ce
88 88 $ cd ..
89 89 $ rm -rf local-stream stream2
90 90
91 91 clone remote via pull
92 92
93 93 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
94 94 requesting all changes
95 95 adding changesets
96 96 adding manifests
97 97 adding file changes
98 98 added 3 changesets with 2 changes to 2 files
99 99 updating to branch default
100 100 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 101
102 102 verify
103 103
104 104 $ cd local
105 105 $ hg verify
106 106 checking changesets
107 107 checking manifests
108 108 crosschecking files in changesets and manifests
109 109 checking files
110 110 2 files, 3 changesets, 2 total revisions
111 111 $ echo '[hooks]' >> .hg/hgrc
112 112 $ echo "changegroup = printenv.py changegroup-in-local 0 ../dummylog" >> .hg/hgrc
113 113
114 114 empty default pull
115 115
116 116 $ hg paths
117 117 default = ssh://user@dummy/remote
118 118 $ hg pull -e "python \"$TESTDIR/dummyssh\""
119 119 pulling from ssh://user@dummy/remote
120 120 searching for changes
121 121 no changes found
122 122
123 123 pull from wrong ssh URL
124 124
125 125 $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
126 126 pulling from ssh://user@dummy/doesnotexist
127 127 remote: abort: repository doesnotexist not found!
128 128 abort: no suitable response from remote hg!
129 129 [255]
130 130
131 131 local change
132 132
133 133 $ echo bleah > foo
134 134 $ hg ci -m "add"
135 135
136 136 updating rc
137 137
138 138 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
139 139 $ echo "[ui]" >> .hg/hgrc
140 140 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
141 141
142 142 find outgoing
143 143
144 144 $ hg out ssh://user@dummy/remote
145 145 comparing with ssh://user@dummy/remote
146 146 searching for changes
147 147 changeset: 3:a28a9d1a809c
148 148 tag: tip
149 149 parent: 0:1160648e36ce
150 150 user: test
151 151 date: Thu Jan 01 00:00:00 1970 +0000
152 152 summary: add
153 153
154 154
155 155 find incoming on the remote side
156 156
157 157 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
158 158 comparing with ssh://user@dummy/local
159 159 searching for changes
160 160 changeset: 3:a28a9d1a809c
161 161 tag: tip
162 162 parent: 0:1160648e36ce
163 163 user: test
164 164 date: Thu Jan 01 00:00:00 1970 +0000
165 165 summary: add
166 166
167 167
168 168 find incoming on the remote side (using absolute path)
169 169
170 170 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
171 171 comparing with ssh://user@dummy/$TESTTMP/local
172 172 searching for changes
173 173 changeset: 3:a28a9d1a809c
174 174 tag: tip
175 175 parent: 0:1160648e36ce
176 176 user: test
177 177 date: Thu Jan 01 00:00:00 1970 +0000
178 178 summary: add
179 179
180 180
181 181 push
182 182
183 183 $ hg push
184 184 pushing to ssh://user@dummy/remote
185 185 searching for changes
186 186 remote: adding changesets
187 187 remote: adding manifests
188 188 remote: adding file changes
189 189 remote: added 1 changesets with 1 changes to 1 files
190 190 $ cd ../remote
191 191
192 192 check remote tip
193 193
194 194 $ hg tip
195 195 changeset: 3:a28a9d1a809c
196 196 tag: tip
197 197 parent: 0:1160648e36ce
198 198 user: test
199 199 date: Thu Jan 01 00:00:00 1970 +0000
200 200 summary: add
201 201
202 202 $ hg verify
203 203 checking changesets
204 204 checking manifests
205 205 crosschecking files in changesets and manifests
206 206 checking files
207 207 2 files, 4 changesets, 3 total revisions
208 208 $ hg cat -r tip foo
209 209 bleah
210 210 $ echo z > z
211 211 $ hg ci -A -m z z
212 212 created new head
213 213
214 214 test pushkeys and bookmarks
215 215
216 216 $ cd ../local
217 217 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
218 218 bookmarks
219 219 namespaces
220 220 phases
221 221 $ hg book foo -r 0
222 222 $ hg out -B
223 223 comparing with ssh://user@dummy/remote
224 224 searching for changed bookmarks
225 225 foo 1160648e36ce
226 226 $ hg push -B foo
227 227 pushing to ssh://user@dummy/remote
228 228 searching for changes
229 229 no changes found
230 230 exporting bookmark foo
231 231 [1]
232 232 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
233 233 foo 1160648e36cec0054048a7edc4110c6f84fde594
234 234 $ hg book -f foo
235 235 $ hg push --traceback
236 236 pushing to ssh://user@dummy/remote
237 237 searching for changes
238 238 no changes found
239 239 updating bookmark foo
240 240 [1]
241 241 $ hg book -d foo
242 242 $ hg in -B
243 243 comparing with ssh://user@dummy/remote
244 244 searching for changed bookmarks
245 245 foo a28a9d1a809c
246 246 $ hg book -f -r 0 foo
247 247 $ hg pull -B foo
248 248 pulling from ssh://user@dummy/remote
249 249 no changes found
250 250 updating bookmark foo
251 251 $ hg book -d foo
252 252 $ hg push -B foo
253 253 pushing to ssh://user@dummy/remote
254 254 searching for changes
255 255 no changes found
256 256 deleting remote bookmark foo
257 257 [1]
258 258
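The debugpushkey calls in this section exercise the pushkey protocol directly: the "namespaces" query lists what the server exposes (bookmarks, namespaces, phases) and "bookmarks" dumps key/value pairs such as "foo 1160648e36ce...". For completeness, a hedged sketch of driving the same query from a script rather than interactively (the whitespace-splitting of the output is an assumption based on the lines shown above):

  # Illustration only: ask a peer which pushkey namespaces it advertises by
  # shelling out to "hg debugpushkey", as this test does by hand.
  import subprocess

  def pushkey_namespaces(url, sshcmd):
      out = subprocess.check_output(
          ['hg', '--config', 'ui.ssh=%s' % sshcmd,
           'debugpushkey', url, 'namespaces'])
      return out.decode('ascii', 'replace').split()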
259 259 a bad, evil hook that prints to stdout
260 260
261 261 $ cat <<EOF > $TESTTMP/badhook
262 262 > import sys
263 263 > sys.stdout.write("KABOOM\n")
264 264 > EOF
265 265
266 266 $ echo '[hooks]' >> ../remote/.hg/hgrc
267 267 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
268 268 $ echo r > r
269 269 $ hg ci -A -m z r
270 270
271 271 push should succeed even though the remote hook writes unexpected output to stdout
272 272
273 273 $ hg push
274 274 pushing to ssh://user@dummy/remote
275 275 searching for changes
276 276 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
277 277 remote: adding changesets
278 278 remote: adding manifests
279 279 remote: adding file changes
280 280 remote: added 1 changesets with 1 changes to 1 files
281 281 remote: KABOOM
282 282 $ hg -R ../remote heads
283 283 changeset: 5:1383141674ec
284 284 tag: tip
285 285 parent: 3:a28a9d1a809c
286 286 user: test
287 287 date: Thu Jan 01 00:00:00 1970 +0000
288 288 summary: z
289 289
290 290 changeset: 4:6c0482d977a3
291 291 parent: 0:1160648e36ce
292 292 user: test
293 293 date: Thu Jan 01 00:00:00 1970 +0000
294 294 summary: z
295 295
296 296
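What the stanza above demonstrates is that a hook writing to stdout cannot break the push: the stray output is kept out of the wire protocol and the client shows it prefixed with "remote: ", hence "remote: KABOOM" followed by a successful push. A rough, Python 3 illustration of that capture-and-prefix idea (this is not Mercurial's actual sshserver/sshpeer code):

  import contextlib
  import io
  import sys

  def run_hook_captured(hook):
      # Run the hook with stdout captured so stray prints cannot corrupt the
      # protocol stream, then surface them the way the client displays remote
      # output: one "remote: "-prefixed line each on stderr.
      buf = io.StringIO()
      with contextlib.redirect_stdout(buf):
          ret = hook()
      for line in buf.getvalue().splitlines():
          sys.stderr.write('remote: %s\n' % line)
      return ret

  def badhook():
      print('KABOOM')         # the "bad, evil" hook from this test
      return 0

  run_hook_captured(badhook)  # -> "remote: KABOOM" on stderr, push unaffected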
297 297 clone bookmarks
298 298
299 299 $ hg -R ../remote bookmark test
300 300 $ hg -R ../remote bookmarks
301 301 * test 4:6c0482d977a3
302 302 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
303 303 requesting all changes
304 304 adding changesets
305 305 adding manifests
306 306 adding file changes
307 307 added 6 changesets with 5 changes to 4 files (+1 heads)
308 308 updating to branch default
309 309 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
310 310 $ hg -R local-bookmarks bookmarks
311 311 test 4:6c0482d977a3
312 312
313 313 passwords in ssh urls are not supported
314 314 (we use a glob here because different Python versions give different
315 315 results)
316 316
317 317 $ hg push ssh://user:erroneouspwd@dummy/remote
318 318 pushing to ssh://user:*@dummy/remote (glob)
319 319 abort: password in URL not supported!
320 320 [255]
321 321
322 322 $ cd ..
323 323
324 324 hide outer repo
325 325 $ hg init
326 326
327 327 Test remote paths with spaces (issue2983):
328 328
329 329 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
330 330 $ touch "$TESTTMP/a repo/test"
331 331 $ hg -R 'a repo' commit -A -m "test"
332 332 adding test
333 333 $ hg -R 'a repo' tag tag
334 334 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
335 335 73649e48688a
336 336
337 337 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
338 338 abort: unknown revision 'noNoNO'!
339 339 [255]
340 340
341 341 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
342 342
343 343 $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
344 344 destination directory: a repo
345 345 abort: destination 'a repo' is not empty
346 346 [255]
347 347
348 348 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
349 349 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
350 350 parameters:
351 351
352 352 $ cat > ssh.sh << EOF
353 353 > userhost="\$1"
354 354 > SSH_ORIGINAL_COMMAND="\$2"
355 355 > export SSH_ORIGINAL_COMMAND
356 356 > PYTHONPATH="$PYTHONPATH"
357 357 > export PYTHONPATH
358 358 > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
359 359 > EOF
360 360
361 361 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
362 362 73649e48688a
363 363
364 364 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
365 365 remote: Illegal repository "$TESTTMP/a'repo" (glob)
366 366 abort: no suitable response from remote hg!
367 367 [255]
368 368
369 369 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
370 370 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
371 371 abort: no suitable response from remote hg!
372 372 [255]
373 373
374 374 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
375 375 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
376 376 [255]
377 377
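The two failures above show the sanity checks hg-ssh applies to SSH_ORIGINAL_COMMAND: the command must parse cleanly (the "No closing quotation" text is shlex's error), must have the exact shape "hg -R <repo> serve --stdio", and <repo> must be one of the repositories named on hg-ssh's command line. A simplified sketch of that validation, not the shipped contrib/hg-ssh itself:

  import os
  import shlex
  import sys

  def validate(orig_cmd, allowed_repos):
      # Reject anything that is not "hg -R <allowed repo> serve --stdio".
      try:
          argv = shlex.split(orig_cmd)
      except ValueError as e:           # e.g. "No closing quotation"
          sys.exit('Illegal command "%s": %s' % (orig_cmd, e))
      if argv[:2] == ['hg', '-R'] and argv[3:] == ['serve', '--stdio']:
          repo = os.path.normpath(os.path.abspath(argv[2]))
          if repo in allowed_repos:
              return repo
          sys.exit('Illegal repository "%s"' % repo)
      sys.exit('Illegal command "%s"' % orig_cmd)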
378 378 Test hg-ssh in read-only mode:
379 379
380 380 $ cat > ssh.sh << EOF
381 381 > userhost="\$1"
382 382 > SSH_ORIGINAL_COMMAND="\$2"
383 383 > export SSH_ORIGINAL_COMMAND
384 384 > PYTHONPATH="$PYTHONPATH"
385 385 > export PYTHONPATH
386 386 > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
387 387 > EOF
388 388
389 389 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
390 390 requesting all changes
391 391 adding changesets
392 392 adding manifests
393 393 adding file changes
394 394 added 6 changesets with 5 changes to 4 files (+1 heads)
395 395 updating to branch default
396 396 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
397 397
398 398 $ cd read-only-local
399 399 $ echo "baz" > bar
400 400 $ hg ci -A -m "unpushable commit" bar
401 401 $ hg push --ssh "sh ../ssh.sh"
402 402 pushing to ssh://user@dummy/*/remote (glob)
403 403 searching for changes
404 404 remote: Permission denied
405 405 remote: pretxnopen.hg-ssh hook failed
406 406 abort: push failed on remote
407 407 [255]
408 408
409 409 $ cd ..
410 410
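In read-only mode the push is not blocked by filesystem permissions but by a hook the wrapper installs: "pretxnopen.hg-ssh hook failed" means a pretxnopen hook vetoed the transaction before anything was written. A minimal sketch of such a hook, with assumed names (not the code hg-ssh actually injects):

  def rejectwrite(ui, repo, **kwargs):
      # pretxnopen hooks run before a transaction opens; a true/non-zero
      # return value makes the hook fail and aborts the incoming push.
      ui.warn('Permission denied\n')
      return True

  # hypothetical wiring, e.g. in the served repository's hgrc:
  #   [hooks]
  #   pretxnopen.readonly = python:/path/to/readonlyhook.py:rejectwrite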
411 411 stderr from remote commands should be printed before stdout from local code (issue4336)
412 412
413 413 $ hg clone remote stderr-ordering
414 414 updating to branch default
415 415 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
416 416 $ cd stderr-ordering
417 417 $ cat >> localwrite.py << EOF
418 418 > from mercurial import exchange, extensions
419 419 >
420 420 > def wrappedpush(orig, repo, *args, **kwargs):
421 421 > res = orig(repo, *args, **kwargs)
422 422 > repo.ui.write('local stdout\n')
423 423 > return res
424 424 >
425 425 > def extsetup(ui):
426 426 > extensions.wrapfunction(exchange, 'push', wrappedpush)
427 427 > EOF
428 428
429 429 $ cat >> .hg/hgrc << EOF
430 430 > [paths]
431 431 > default-push = ssh://user@dummy/remote
432 432 > [ui]
433 433 > ssh = python "$TESTDIR/dummyssh"
434 434 > [extensions]
435 435 > localwrite = localwrite.py
436 436 > EOF
437 437
438 438 $ echo localwrite > foo
439 439 $ hg commit -m 'testing localwrite'
440 440 $ hg push
441 441 pushing to ssh://user@dummy/remote
442 442 searching for changes
443 443 remote: adding changesets
444 444 remote: adding manifests
445 445 remote: adding file changes
446 446 remote: added 1 changesets with 1 changes to 1 files
447 447 remote: KABOOM
448 448 local stdout
449 449
450 450 debug output
451 451
452 452 $ hg pull --debug ssh://user@dummy/remote
453 453 pulling from ssh://user@dummy/remote
454 454 running python ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
455 455 sending hello command
456 456 sending between command
457 457 remote: 371
458 458 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024
459 459 remote: 1
460 460 query 1; heads
461 461 sending batch command
462 462 searching for changes
463 463 all remote heads known locally
464 464 no changes found
465 465 sending getbundle command
466 466 bundle2-input-bundle: with-transaction
467 467 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
468 bundle2-input-part: total payload size 15
468 469 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
469 470 bundle2-input-part: total payload size 45
470 471 bundle2-input-bundle: 1 parts total
471 472 checking for updated bookmarks
472 preparing listkeys for "phases"
473 sending listkeys command
474 received listkey for "phases": 15 bytes
475 473
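The hunk above is the behavioural change this test update documents: previously the first "listkeys" part of the getbundle reply arrived without a payload and the client then made a separate listkeys request for phases (the removed 'preparing listkeys for "phases"' / 'sending listkeys command' / 'received listkey for "phases": 15 bytes' lines); now those 15 bytes travel inside the bundle2 reply itself (the added "total payload size 15" line), so the extra round trip disappears while the 45-byte part is unchanged. A hypothetical helper, based only on the debug lines shown here, that checks this property on a pull's --debug output:

  def phases_inlined(debug_output):
      # True when listkeys payload came back inside the bundle2 reply and no
      # separate listkeys command was sent afterwards.
      lines = [l.strip() for l in debug_output.splitlines()]
      inline = any(l.startswith('bundle2-input-part: total payload size')
                   for l in lines)
      extra_roundtrip = 'sending listkeys command' in lines
      return inline and not extra_roundtrip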
476 474 $ cd ..
477 475
478 476 $ cat dummylog
479 477 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
480 478 Got arguments 1:user@dummy 2:hg -R $TESTTMP/nonexistent serve --stdio
481 479 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
482 480 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
483 481 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
484 482 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
485 483 Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
486 484 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
487 485 Got arguments 1:user@dummy 2:hg -R local serve --stdio
488 486 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
489 487 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
490 488 changegroup-in-remote hook: HG_BUNDLE2=1 HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob)
491 489 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
492 490 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
493 491 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
494 492 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
495 493 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
496 494 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
497 495 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
498 496 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
499 497 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
500 498 changegroup-in-remote hook: HG_BUNDLE2=1 HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob)
501 499 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
502 500 Got arguments 1:user@dummy 2:hg init 'a repo'
503 501 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
504 502 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
505 503 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
506 504 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
507 505 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
508 506 changegroup-in-remote hook: HG_BUNDLE2=1 HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob)
509 507 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
510 508
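Every "Got arguments ..." line in the log above comes from the dummyssh helper that the commands in this test pass via -e/--ssh: rather than opening a real SSH connection it records its arguments and runs the requested "hg ... serve --stdio" locally over its own stdio. A rough stand-in conveying the idea (not the dummyssh shipped with Mercurial's tests):

  #!/usr/bin/env python
  # argv[1] is "user@host", argv[2] the remote hg command; log both, then run
  # the command locally so stdin/stdout act as the "ssh" pipe.
  import os
  import subprocess
  import sys

  log = os.path.join(os.environ.get('TESTTMP', '.'), 'dummylog')
  with open(log, 'a') as fp:
      fp.write('Got arguments 1:%s 2:%s\n' % (sys.argv[1], sys.argv[2]))
  sys.exit(subprocess.call(sys.argv[2], shell=True))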
511 509 remote hook failure is attributed to remote
512 510
513 511 $ cat > $TESTTMP/failhook << EOF
514 512 > def hook(ui, repo, **kwargs):
515 513 > ui.write('hook failure!\n')
516 514 > ui.flush()
517 515 > return 1
518 516 > EOF
519 517
520 518 $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
521 519
522 520 $ hg -q --config ui.ssh="python $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
523 521 $ cd hookout
524 522 $ touch hookfailure
525 523 $ hg -q commit -A -m 'remote hook failure'
526 524 $ hg --config ui.ssh="python $TESTDIR/dummyssh" push
527 525 pushing to ssh://user@dummy/remote
528 526 searching for changes
529 527 remote: adding changesets
530 528 remote: adding manifests
531 529 remote: adding file changes
532 530 remote: added 1 changesets with 1 changes to 1 files
533 531 remote: hook failure!
534 532 remote: transaction abort!
535 533 remote: rollback completed
536 534 remote: pretxnchangegroup.fail hook failed
537 535 abort: push failed on remote
538 536 [255]
539 537