localrepo: add unbundle support...
Pierre-Yves David -
r20969:7a679918 default
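This changeset advertises the 'unbundle' capability on the local peer and routes local pushes through exchange.unbundle() instead of addchangegroup(). A minimal sketch of exercising the new path, assuming a Mercurial 2.9-era checkout where these modules import cleanly (the repository paths are hypothetical):

    from mercurial import hg, ui as uimod, exchange

    myui = uimod.ui()
    src = hg.repository(myui, '/path/to/src')    # hypothetical local clone
    dest = hg.peer(myui, {}, '/path/to/dest')    # local peer, now advertising 'unbundle'

    # exchange.push() prefers unbundle over addchangegroup when the peer
    # is capable of it; see the diff of exchange.py below.
    ret = exchange.push(src, dest, force=False, revs=None)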
@@ -1,656 +1,649 @@
1 1 # exchange.py - utility to exchange data between repositories.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import sys
9 9 from i18n import _
10 10 from node import hex, nullid
11 11 import cStringIO
12 12 import errno
13 13 import util, scmutil, changegroup, base85
14 14 import discovery, phases, obsolete, bookmarks, bundle2
15 15
16 16
17 17 class pushoperation(object):
18 18 """A object that represent a single push operation
19 19
20 20 It purpose is to carry push related state and very common operation.
21 21
22 22 A new should be created at the begining of each push and discarded
23 23 afterward.
24 24 """
25 25
26 26 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
27 27 # repo we push from
28 28 self.repo = repo
29 29 self.ui = repo.ui
30 30 # repo we push to
31 31 self.remote = remote
32 32 # force option provided
33 33 self.force = force
34 34 # revs to be pushed (None is "all")
35 35 self.revs = revs
36 36 # allow push of new branch
37 37 self.newbranch = newbranch
38 38 # did a local lock get acquired?
39 39 self.locallocked = None
40 40 # Integer version of the push result
41 41 # - None means nothing to push
42 42 # - 0 means HTTP error
43 43 # - 1 means we pushed and remote head count is unchanged *or*
44 44 # we have outgoing changesets but refused to push
45 45 # - other values as described by addchangegroup()
46 46 self.ret = None
47 47 # discovery.outgoing object (contains common and outgoing data)
48 48 self.outgoing = None
49 49 # all remote heads before the push
50 50 self.remoteheads = None
51 51 # testable as a boolean indicating if any nodes are missing locally.
52 52 self.incoming = None
53 53 # set of all heads common after changeset bundle push
54 54 self.commonheads = None
55 55
56 56 def push(repo, remote, force=False, revs=None, newbranch=False):
57 57 '''Push outgoing changesets (limited by revs) from a local
58 58 repository to remote. Return an integer:
59 59 - None means nothing to push
60 60 - 0 means HTTP error
61 61 - 1 means we pushed and remote head count is unchanged *or*
62 62 we have outgoing changesets but refused to push
63 63 - other values as described by addchangegroup()
64 64 '''
65 65 pushop = pushoperation(repo, remote, force, revs, newbranch)
66 66 if pushop.remote.local():
67 67 missing = (set(pushop.repo.requirements)
68 68 - pushop.remote.local().supported)
69 69 if missing:
70 70 msg = _("required features are not"
71 71 " supported in the destination:"
72 72 " %s") % (', '.join(sorted(missing)))
73 73 raise util.Abort(msg)
74 74
75 75 # there are two ways to push to remote repo:
76 76 #
77 77 # addchangegroup assumes local user can lock remote
78 78 # repo (local filesystem, old ssh servers).
79 79 #
80 80 # unbundle assumes local user cannot lock remote repo (new ssh
81 81 # servers, http servers).
82 82
83 83 if not pushop.remote.canpush():
84 84 raise util.Abort(_("destination does not support push"))
85 85 # get local lock as we might write phase data
86 86 locallock = None
87 87 try:
88 88 locallock = pushop.repo.lock()
89 89 pushop.locallocked = True
90 90 except IOError, err:
91 91 pushop.locallocked = False
92 92 if err.errno != errno.EACCES:
93 93 raise
94 94 # source repo cannot be locked.
95 95 # We do not abort the push, but just disable the local phase
96 96 # synchronisation.
97 97 msg = 'cannot lock source repository: %s\n' % err
98 98 pushop.ui.debug(msg)
99 99 try:
100 100 pushop.repo.checkpush(pushop)
101 101 lock = None
102 102 unbundle = pushop.remote.capable('unbundle')
103 103 if not unbundle:
104 104 lock = pushop.remote.lock()
105 105 try:
106 106 _pushdiscovery(pushop)
107 107 if _pushcheckoutgoing(pushop):
108 108 _pushchangeset(pushop)
109 109 _pushcomputecommonheads(pushop)
110 110 _pushsyncphase(pushop)
111 111 _pushobsolete(pushop)
112 112 finally:
113 113 if lock is not None:
114 114 lock.release()
115 115 finally:
116 116 if locallock is not None:
117 117 locallock.release()
118 118
119 119 _pushbookmark(pushop)
120 120 return pushop.ret
121 121
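The integer convention documented in push() above can be mapped back to a user-facing summary. A small hedged helper, not part of this changeset, restating only what the docstring guarantees:

    def describepushresult(ret):
        # restates the push() return convention documented above
        if ret is None:
            return 'nothing to push'
        if ret == 0:
            return 'push failed (HTTP error)'
        if ret == 1:
            return ('pushed; remote head count unchanged, or there were '
                    'outgoing changesets but the push was refused')
        return 'pushed; addchangegroup() reported %r' % ret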
122 122 def _pushdiscovery(pushop):
123 123 # discovery
124 124 unfi = pushop.repo.unfiltered()
125 125 fci = discovery.findcommonincoming
126 126 commoninc = fci(unfi, pushop.remote, force=pushop.force)
127 127 common, inc, remoteheads = commoninc
128 128 fco = discovery.findcommonoutgoing
129 129 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
130 130 commoninc=commoninc, force=pushop.force)
131 131 pushop.outgoing = outgoing
132 132 pushop.remoteheads = remoteheads
133 133 pushop.incoming = inc
134 134
135 135 def _pushcheckoutgoing(pushop):
136 136 outgoing = pushop.outgoing
137 137 unfi = pushop.repo.unfiltered()
138 138 if not outgoing.missing:
139 139 # nothing to push
140 140 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
141 141 return False
142 142 # something to push
143 143 if not pushop.force:
144 144 # if repo.obsstore == False --> no obsolete
145 145 # then, save the iteration
146 146 if unfi.obsstore:
147 147 # these messages are here for the 80-char limit reason
148 148 mso = _("push includes obsolete changeset: %s!")
149 149 mst = "push includes %s changeset: %s!"
150 150 # plain versions for i18n tool to detect them
151 151 _("push includes unstable changeset: %s!")
152 152 _("push includes bumped changeset: %s!")
153 153 _("push includes divergent changeset: %s!")
154 154 # If we are to push and there is at least one
155 155 # obsolete or unstable changeset in missing, at
156 156 # least one of the missing heads will be obsolete or
157 157 # unstable. So checking heads only is ok
158 158 for node in outgoing.missingheads:
159 159 ctx = unfi[node]
160 160 if ctx.obsolete():
161 161 raise util.Abort(mso % ctx)
162 162 elif ctx.troubled():
163 163 raise util.Abort(_(mst)
164 164 % (ctx.troubles()[0],
165 165 ctx))
166 166 newbm = pushop.ui.configlist('bookmarks', 'pushing')
167 167 discovery.checkheads(unfi, pushop.remote, outgoing,
168 168 pushop.remoteheads,
169 169 pushop.newbranch,
170 170 bool(pushop.incoming),
171 171 newbm)
172 172 return True
173 173
174 174 def _pushchangeset(pushop):
175 175 """Make the actual push of changeset bundle to remote repo"""
176 176 outgoing = pushop.outgoing
177 177 unbundle = pushop.remote.capable('unbundle')
178 178 # TODO: get bundlecaps from remote
179 179 bundlecaps = None
180 180 # create a changegroup from local
181 181 if pushop.revs is None and not (outgoing.excluded
182 182 or pushop.repo.changelog.filteredrevs):
183 183 # push everything,
184 184 # use the fast path, no race possible on push
185 185 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
186 186 cg = changegroup.getsubset(pushop.repo,
187 187 outgoing,
188 188 bundler,
189 189 'push',
190 190 fastpath=True)
191 191 else:
192 192 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
193 193 bundlecaps)
194 194
195 195 # apply changegroup to remote
196 196 if unbundle:
197 197 # local repo finds heads on server, finds out what
198 198 # revs it must push. once revs transferred, if server
199 199 # finds it has different heads (someone else won
200 200 # commit/push race), server aborts.
201 201 if pushop.force:
202 202 remoteheads = ['force']
203 203 else:
204 204 remoteheads = pushop.remoteheads
205 205 # ssh: return remote's addchangegroup()
206 206 # http: return remote's addchangegroup() or 0 for error
207 207 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
208 208 'push')
209 209 else:
210 210 # we return an integer indicating remote head count
211 211 # change
212 212 pushop.ret = pushop.remote.addchangegroup(cg, 'push',
213 213 pushop.repo.url())
214 214
215 215 def _pushcomputecommonheads(pushop):
216 216 unfi = pushop.repo.unfiltered()
217 217 if pushop.ret:
218 218 # push succeeded, synchronize the target of the push
219 219 cheads = pushop.outgoing.missingheads
220 220 elif pushop.revs is None:
221 221 # All-out push failed. synchronize all common
222 222 cheads = pushop.outgoing.commonheads
223 223 else:
224 224 # I want cheads = heads(::missingheads and ::commonheads)
225 225 # (missingheads is revs with secret changeset filtered out)
226 226 #
227 227 # This can be expressed as:
228 228 # cheads = ( (missingheads and ::commonheads)
229 229 # + (commonheads and ::missingheads))
230 230 # )
231 231 #
232 232 # while trying to push we already computed the following:
233 233 # common = (::commonheads)
234 234 # missing = ((commonheads::missingheads) - commonheads)
235 235 #
236 236 # We can pick:
237 237 # * missingheads part of common (::commonheads)
238 238 common = set(pushop.outgoing.common)
239 239 nm = pushop.repo.changelog.nodemap
240 240 cheads = [node for node in pushop.revs if nm[node] in common]
241 241 # and
242 242 # * commonheads parents on missing
243 243 revset = unfi.set('%ln and parents(roots(%ln))',
244 244 pushop.outgoing.commonheads,
245 245 pushop.outgoing.missing)
246 246 cheads.extend(c.node() for c in revset)
247 247 pushop.commonheads = cheads
248 248
249 249 def _pushsyncphase(pushop):
250 250 """synchronise phase information locally and remotly"""
251 251 unfi = pushop.repo.unfiltered()
252 252 cheads = pushop.commonheads
253 253 if pushop.ret:
254 254 # push succeeded, synchronize the target of the push
255 255 cheads = pushop.outgoing.missingheads
256 256 elif pushop.revs is None:
257 257 # All-out push failed. synchronize all common
258 258 cheads = pushop.outgoing.commonheads
259 259 else:
260 260 # I want cheads = heads(::missingheads and ::commonheads)
261 261 # (missingheads is revs with secret changeset filtered out)
262 262 #
263 263 # This can be expressed as:
264 264 # cheads = ( (missingheads and ::commonheads)
265 265 # + (commonheads and ::missingheads))
266 266 # )
267 267 #
268 268 # while trying to push we already computed the following:
269 269 # common = (::commonheads)
270 270 # missing = ((commonheads::missingheads) - commonheads)
271 271 #
272 272 # We can pick:
273 273 # * missingheads part of common (::commonheads)
274 274 common = set(pushop.outgoing.common)
275 275 nm = pushop.repo.changelog.nodemap
276 276 cheads = [node for node in pushop.revs if nm[node] in common]
277 277 # and
278 278 # * commonheads parents on missing
279 279 revset = unfi.set('%ln and parents(roots(%ln))',
280 280 pushop.outgoing.commonheads,
281 281 pushop.outgoing.missing)
282 282 cheads.extend(c.node() for c in revset)
283 283 pushop.commonheads = cheads
284 284 # even when we don't push, exchanging phase data is useful
285 285 remotephases = pushop.remote.listkeys('phases')
286 286 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
287 287 and remotephases # server supports phases
288 288 and pushop.ret is None # nothing was pushed
289 289 and remotephases.get('publishing', False)):
290 290 # When:
291 291 # - this is a subrepo push
292 292 # - and remote supports phases
293 293 # - and no changeset was pushed
294 294 # - and remote is publishing
295 295 # We may be in issue 3871 case!
296 296 # We drop the possible phase synchronisation done by
297 297 # courtesy to publish changesets possibly locally draft
298 298 # on the remote.
299 299 remotephases = {'publishing': 'True'}
300 300 if not remotephases: # old server or public only repo
301 301 _localphasemove(pushop, cheads)
302 302 # don't push any phase data as there is nothing to push
303 303 else:
304 304 ana = phases.analyzeremotephases(pushop.repo, cheads,
305 305 remotephases)
306 306 pheads, droots = ana
307 307 ### Apply remote phase on local
308 308 if remotephases.get('publishing', False):
309 309 _localphasemove(pushop, cheads)
310 310 else: # publish = False
311 311 _localphasemove(pushop, pheads)
312 312 _localphasemove(pushop, cheads, phases.draft)
313 313 ### Apply local phase on remote
314 314
315 315 # Get the list of all revs draft on remote but public here.
316 316 # XXX Beware that the revset breaks if droots is not strictly
317 317 # XXX roots; we may want to ensure it is, but that is costly
318 318 outdated = unfi.set('heads((%ln::%ln) and public())',
319 319 droots, cheads)
320 320 for newremotehead in outdated:
321 321 r = pushop.remote.pushkey('phases',
322 322 newremotehead.hex(),
323 323 str(phases.draft),
324 324 str(phases.public))
325 325 if not r:
326 326 pushop.ui.warn(_('updating %s to public failed!\n')
327 327 % newremotehead)
328 328
329 329 def _localphasemove(pushop, nodes, phase=phases.public):
330 330 """move <nodes> to <phase> in the local source repo"""
331 331 if pushop.locallocked:
332 332 phases.advanceboundary(pushop.repo, phase, nodes)
333 333 else:
334 334 # repo is not locked, do not change any phases!
335 335 # Informs the user that phases should have been moved when
336 336 # applicable.
337 337 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
338 338 phasestr = phases.phasenames[phase]
339 339 if actualmoves:
340 340 pushop.ui.status(_('cannot lock source repo, skipping '
341 341 'local %s phase update\n') % phasestr)
342 342
343 343 def _pushobsolete(pushop):
344 344 """utility function to push obsolete markers to a remote"""
345 345 pushop.ui.debug('try to push obsolete markers to remote\n')
346 346 repo = pushop.repo
347 347 remote = pushop.remote
348 348 if (obsolete._enabled and repo.obsstore and
349 349 'obsolete' in remote.listkeys('namespaces')):
350 350 rslts = []
351 351 remotedata = repo.listkeys('obsolete')
352 352 for key in sorted(remotedata, reverse=True):
353 353 # reverse sort to ensure we end with dump0
354 354 data = remotedata[key]
355 355 rslts.append(remote.pushkey('obsolete', key, '', data))
356 356 if [r for r in rslts if not r]:
357 357 msg = _('failed to push some obsolete markers!\n')
358 358 repo.ui.warn(msg)
359 359
360 360 def _pushbookmark(pushop):
361 361 """Update bookmark position on remote"""
362 362 ui = pushop.ui
363 363 repo = pushop.repo.unfiltered()
364 364 remote = pushop.remote
365 365 ui.debug("checking for updated bookmarks\n")
366 366 revnums = map(repo.changelog.rev, pushop.revs or [])
367 367 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
368 368 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
369 369 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
370 370 srchex=hex)
371 371
372 372 for b, scid, dcid in advsrc:
373 373 if ancestors and repo[scid].rev() not in ancestors:
374 374 continue
375 375 if remote.pushkey('bookmarks', b, dcid, scid):
376 376 ui.status(_("updating bookmark %s\n") % b)
377 377 else:
378 378 ui.warn(_('updating bookmark %s failed!\n') % b)
379 379
380 380 class pulloperation(object):
381 381 """A object that represent a single pull operation
382 382
383 383 It purpose is to carry push related state and very common operation.
384 384
385 385 A new should be created at the begining of each pull and discarded
386 386 afterward.
387 387 """
388 388
389 389 def __init__(self, repo, remote, heads=None, force=False):
390 390 # repo we pull into
391 391 self.repo = repo
392 392 # repo we pull from
393 393 self.remote = remote
394 394 # revision we try to pull (None is "all")
395 395 self.heads = heads
396 396 # do we force pull?
397 397 self.force = force
398 398 # the name of the pull transaction
399 399 self._trname = 'pull\n' + util.hidepassword(remote.url())
400 400 # hold the transaction once created
401 401 self._tr = None
402 402 # set of common changesets between local and remote before pull
403 403 self.common = None
404 404 # set of pulled heads
405 405 self.rheads = None
406 406 # list of missing changesets to fetch remotely
407 407 self.fetch = None
408 408 # result of changegroup pulling (used as return code by pull)
409 409 self.cgresult = None
410 410 # list of steps remaining to do (related to future bundle2 usage)
411 411 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
412 412
413 413 @util.propertycache
414 414 def pulledsubset(self):
415 415 """heads of the set of changeset target by the pull"""
416 416 # compute target subset
417 417 if self.heads is None:
418 418 # We pulled every thing possible
419 419 # sync on everything common
420 420 c = set(self.common)
421 421 ret = list(self.common)
422 422 for n in self.rheads:
423 423 if n not in c:
424 424 ret.append(n)
425 425 return ret
426 426 else:
427 427 # We pulled a specific subset
428 428 # sync on this subset
429 429 return self.heads
430 430
431 431 def gettransaction(self):
432 432 """get appropriate pull transaction, creating it if needed"""
433 433 if self._tr is None:
434 434 self._tr = self.repo.transaction(self._trname)
435 435 return self._tr
436 436
437 437 def closetransaction(self):
438 438 """close transaction if created"""
439 439 if self._tr is not None:
440 440 self._tr.close()
441 441
442 442 def releasetransaction(self):
443 443 """release transaction if created"""
444 444 if self._tr is not None:
445 445 self._tr.release()
446 446
447 447 def pull(repo, remote, heads=None, force=False):
448 448 pullop = pulloperation(repo, remote, heads, force)
449 449 if pullop.remote.local():
450 450 missing = set(pullop.remote.requirements) - pullop.repo.supported
451 451 if missing:
452 452 msg = _("required features are not"
453 453 " supported in the destination:"
454 454 " %s") % (', '.join(sorted(missing)))
455 455 raise util.Abort(msg)
456 456
457 457 lock = pullop.repo.lock()
458 458 try:
459 459 _pulldiscovery(pullop)
460 460 if pullop.remote.capable('bundle2'):
461 461 _pullbundle2(pullop)
462 462 if 'changegroup' in pullop.todosteps:
463 463 _pullchangeset(pullop)
464 464 if 'phases' in pullop.todosteps:
465 465 _pullphase(pullop)
466 466 if 'obsmarkers' in pullop.todosteps:
467 467 _pullobsolete(pullop)
468 468 pullop.closetransaction()
469 469 finally:
470 470 pullop.releasetransaction()
471 471 lock.release()
472 472
473 473 return pullop.cgresult
474 474
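For symmetry with push(), a hedged sketch of driving pull() from the caller's side (the repository path and remote URL are hypothetical; the return value is pullop.cgresult, i.e. the changegroup application result):

    from mercurial import hg, ui as uimod, exchange

    myui = uimod.ui()
    dest = hg.repository(myui, '/path/to/local')             # hypothetical
    src = hg.peer(myui, {}, 'http://hg.example.com/remote')  # hypothetical
    cgresult = exchange.pull(dest, src, heads=None, force=False)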
475 475 def _pulldiscovery(pullop):
476 476 """discovery phase for the pull
477 477
478 478 Currently handles changeset discovery only; will change to handle all
479 479 discovery at some point."""
480 480 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
481 481 pullop.remote,
482 482 heads=pullop.heads,
483 483 force=pullop.force)
484 484 pullop.common, pullop.fetch, pullop.rheads = tmp
485 485
486 486 def _pullbundle2(pullop):
487 487 """pull data using bundle2
488 488
489 489 For now, the only supported data are changegroup."""
490 490 kwargs = {'bundlecaps': set(['HG20'])}
491 491 # pulling changegroup
492 492 pullop.todosteps.remove('changegroup')
493 493 if not pullop.fetch:
494 494 pullop.repo.ui.status(_("no changes found\n"))
495 495 pullop.cgresult = 0
496 496 else:
497 497 kwargs['common'] = pullop.common
498 498 kwargs['heads'] = pullop.heads or pullop.rheads
499 499 if pullop.heads is None and list(pullop.common) == [nullid]:
500 500 pullop.repo.ui.status(_("requesting all changes\n"))
501 501 if kwargs.keys() == ['format']:
502 502 return # nothing to pull
503 503 bundle = pullop.remote.getbundle('pull', **kwargs)
504 504 try:
505 505 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
506 506 except KeyError, exc:
507 507 raise util.Abort('missing support for %s' % exc)
508 508 assert len(op.records['changegroup']) == 1
509 509 pullop.cgresult = op.records['changegroup'][0]['return']
510 510
511 511 def _pullchangeset(pullop):
512 512 """pull changeset from unbundle into the local repo"""
513 513 # We delay opening the transaction as late as possible so we
514 514 # don't open a transaction for nothing, or break a future useful
515 515 # rollback call
516 516 pullop.todosteps.remove('changegroup')
517 517 if not pullop.fetch:
518 518 pullop.repo.ui.status(_("no changes found\n"))
519 519 pullop.cgresult = 0
520 520 return
521 521 pullop.gettransaction()
522 522 if pullop.heads is None and list(pullop.common) == [nullid]:
523 523 pullop.repo.ui.status(_("requesting all changes\n"))
524 524 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
525 525 # issue1320, avoid a race if remote changed after discovery
526 526 pullop.heads = pullop.rheads
527 527
528 528 if pullop.remote.capable('getbundle'):
529 529 # TODO: get bundlecaps from remote
530 530 cg = pullop.remote.getbundle('pull', common=pullop.common,
531 531 heads=pullop.heads or pullop.rheads)
532 532 elif pullop.heads is None:
533 533 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
534 534 elif not pullop.remote.capable('changegroupsubset'):
535 535 raise util.Abort(_("partial pull cannot be done because "
536 536 "other repository doesn't support "
537 537 "changegroupsubset."))
538 538 else:
539 539 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
540 540 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
541 541 pullop.remote.url())
542 542
543 543 def _pullphase(pullop):
544 544 # Get remote phases data from remote
545 545 pullop.todosteps.remove('phases')
546 546 remotephases = pullop.remote.listkeys('phases')
547 547 publishing = bool(remotephases.get('publishing', False))
548 548 if remotephases and not publishing:
549 549 # remote is new and unpublishing
550 550 pheads, _dr = phases.analyzeremotephases(pullop.repo,
551 551 pullop.pulledsubset,
552 552 remotephases)
553 553 phases.advanceboundary(pullop.repo, phases.public, pheads)
554 554 phases.advanceboundary(pullop.repo, phases.draft,
555 555 pullop.pulledsubset)
556 556 else:
557 557 # Remote is old or publishing; all common changesets
558 558 # should be seen as public
559 559 phases.advanceboundary(pullop.repo, phases.public,
560 560 pullop.pulledsubset)
561 561
562 562 def _pullobsolete(pullop):
563 563 """utility function to pull obsolete markers from a remote
564 564
565 565 The `gettransaction` function returns the pull transaction, creating
566 566 one if necessary. We return the transaction to inform the calling code that
567 567 a new transaction has been created (when applicable).
568 568
569 569 Exists mostly to allow overriding for experimentation purposes"""
570 570 pullop.todosteps.remove('obsmarkers')
571 571 tr = None
572 572 if obsolete._enabled:
573 573 pullop.repo.ui.debug('fetching remote obsolete markers\n')
574 574 remoteobs = pullop.remote.listkeys('obsolete')
575 575 if 'dump0' in remoteobs:
576 576 tr = pullop.gettransaction()
577 577 for key in sorted(remoteobs, reverse=True):
578 578 if key.startswith('dump'):
579 579 data = base85.b85decode(remoteobs[key])
580 580 pullop.repo.obsstore.mergemarkers(tr, data)
581 581 pullop.repo.invalidatevolatilesets()
582 582 return tr
583 583
584 584 def getbundle(repo, source, heads=None, common=None, bundlecaps=None):
585 585 """return a full bundle (with potentially multiple kind of parts)
586 586
587 587 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
588 588 passed. For now, the bundle can contain only changegroup, but this will
589 589 changes when more part type will be available for bundle2.
590 590
591 591 This is different from changegroup.getbundle that only returns an HG10
592 592 changegroup bundle. They may eventually get reunited in the future when we
593 593 have a clearer idea of the API we what to query different data.
594 594
595 595 The implementation is at a very early stage and will get massive rework
596 596 when the API of bundle is refined.
597 597 """
598 598 # build bundle here.
599 599 cg = changegroup.getbundle(repo, source, heads=heads,
600 600 common=common, bundlecaps=bundlecaps)
601 601 if bundlecaps is None or 'HG20' not in bundlecaps:
602 602 return cg
603 603 # very crude first implementation,
604 604 # the bundle API will change and the generation will be done lazily.
605 605 bundler = bundle2.bundle20(repo.ui)
606 606 tempname = changegroup.writebundle(cg, None, 'HG10UN')
607 607 data = open(tempname).read()
608 608 part = bundle2.part('changegroup', data=data)
609 609 bundler.addpart(part)
610 610 temp = cStringIO.StringIO()
611 611 for c in bundler.getchunks():
612 612 temp.write(c)
613 613 temp.seek(0)
614 614 return bundle2.unbundle20(repo.ui, temp)
615 615
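getbundle() above returns a plain HG10 changegroup unless the caller advertises HG20 through bundlecaps, in which case the changegroup is wrapped in a bundle2 stream. A hedged sketch of the two call shapes (repo, heads and common are placeholders):

    cg10 = exchange.getbundle(repo, 'pull', heads=heads, common=common)
    cg20 = exchange.getbundle(repo, 'pull', heads=heads, common=common,
                              bundlecaps=set(['HG20']))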
616 616 class PushRaced(RuntimeError):
617 617 """An exception raised during unbunding that indicate a push race"""
618 618
619 619 def check_heads(repo, their_heads, context):
620 620 """check if the heads of a repo have been modified
621 621
622 622 Used by peer for unbundling.
623 623 """
624 624 heads = repo.heads()
625 625 heads_hash = util.sha1(''.join(sorted(heads))).digest()
626 626 if not (their_heads == ['force'] or their_heads == heads or
627 627 their_heads == ['hashed', heads_hash]):
628 628 # someone else committed/pushed/unbundled while we
629 629 # were transferring data
630 630 raise PushRaced('repository changed while %s - '
631 631 'please try again' % context)
632 632
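check_heads() accepts three shapes for their_heads: the literal ['force'], the raw list of head nodes, or ['hashed', digest]. A hedged sketch of producing the hashed form on the sending side, mirroring the digest computed above (util.sha1 is effectively hashlib.sha1; the head nodes are placeholders):

    import hashlib

    def hashedheads(heads):
        # same digest as check_heads(): sha1 over the concatenated sorted nodes
        return ['hashed', hashlib.sha1(''.join(sorted(heads))).digest()]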
633 633 def unbundle(repo, cg, heads, source, url):
634 634 """Apply a bundle to a repo.
635 635
636 636 This function makes sure the repo is locked during the application and has
637 637 a mechanism to check that no push race occurred between the creation of the
638 638 bundle and its application.
639 639
640 640 If the push was raced, a PushRaced exception is raised."""
641 641 r = 0
642 642 lock = repo.lock()
643 643 try:
644 644 check_heads(repo, heads, 'uploading changes')
645 645 # push can proceed
646 try:
647 r = changegroup.addchangegroup(repo, cg, source, url)
648 except util.Abort, inst:
649 # The old code we moved used sys.stderr directly.
650 # We did not changed it to minise code change.
651 # This need to be moved to something proper.
652 # Feel free to do it.
653 sys.stderr.write("abort: %s\n" % inst)
646 r = changegroup.addchangegroup(repo, cg, source, url)
654 647 finally:
655 648 lock.release()
656 649 return r
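The localrepo.py hunk below wires the function above into the local peer: localpeer.unbundle() delegates to exchange.unbundle() and converts a PushRaced race into a ResponseError. From the pushing side the protocol is: snapshot the remote heads, build the bundle against them, and let the remote's unbundle() reject the upload if its heads moved in the meantime. A hedged sketch (buildoutgoing is a hypothetical helper standing in for the changegroup generation done in _pushchangeset):

    def pushwithracecheck(repo, peer):
        # hedged sketch of the client half of the race check
        remoteheads = peer.heads()             # snapshot before bundling
        cg = buildoutgoing(repo, remoteheads)  # hypothetical helper
        # the remote re-checks its heads under lock (check_heads above) and
        # raises PushRaced if someone pushed in between
        return peer.unbundle(cg, remoteheads, repo.url())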
@@ -1,1876 +1,1885 @@
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from node import hex, nullid, short
8 8 from i18n import _
9 9 import peer, changegroup, subrepo, pushkey, obsolete, repoview
10 10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 11 import lock as lockmod
12 12 import transaction, store, encoding, exchange
13 13 import scmutil, util, extensions, hook, error, revset
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 from lock import release
18 18 import weakref, errno, os, time, inspect
19 19 import branchmap, pathutil
20 20 propertycache = util.propertycache
21 21 filecache = scmutil.filecache
22 22
23 23 class repofilecache(filecache):
24 24 """All filecache usage on repo are done for logic that should be unfiltered
25 25 """
26 26
27 27 def __get__(self, repo, type=None):
28 28 return super(repofilecache, self).__get__(repo.unfiltered(), type)
29 29 def __set__(self, repo, value):
30 30 return super(repofilecache, self).__set__(repo.unfiltered(), value)
31 31 def __delete__(self, repo):
32 32 return super(repofilecache, self).__delete__(repo.unfiltered())
33 33
34 34 class storecache(repofilecache):
35 35 """filecache for files in the store"""
36 36 def join(self, obj, fname):
37 37 return obj.sjoin(fname)
38 38
39 39 class unfilteredpropertycache(propertycache):
40 40 """propertycache that apply to unfiltered repo only"""
41 41
42 42 def __get__(self, repo, type=None):
43 43 unfi = repo.unfiltered()
44 44 if unfi is repo:
45 45 return super(unfilteredpropertycache, self).__get__(unfi)
46 46 return getattr(unfi, self.name)
47 47
48 48 class filteredpropertycache(propertycache):
49 49 """propertycache that must take filtering in account"""
50 50
51 51 def cachevalue(self, obj, value):
52 52 object.__setattr__(obj, self.name, value)
53 53
54 54
55 55 def hasunfilteredcache(repo, name):
56 56 """check if a repo has an unfilteredpropertycache value for <name>"""
57 57 return name in vars(repo.unfiltered())
58 58
59 59 def unfilteredmethod(orig):
60 60 """decorate method that always need to be run on unfiltered version"""
61 61 def wrapper(repo, *args, **kwargs):
62 62 return orig(repo.unfiltered(), *args, **kwargs)
63 63 return wrapper
64 64
65 65 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
66 'bundle2'))
66 'bundle2', 'unbundle'))
67 67 legacycaps = moderncaps.union(set(['changegroupsubset']))
68 68
69 69 class localpeer(peer.peerrepository):
70 70 '''peer for a local repo; reflects only the most recent API'''
71 71
72 72 def __init__(self, repo, caps=moderncaps):
73 73 peer.peerrepository.__init__(self)
74 74 self._repo = repo.filtered('served')
75 75 self.ui = repo.ui
76 76 self._caps = repo._restrictcapabilities(caps)
77 77 self.requirements = repo.requirements
78 78 self.supportedformats = repo.supportedformats
79 79
80 80 def close(self):
81 81 self._repo.close()
82 82
83 83 def _capabilities(self):
84 84 return self._caps
85 85
86 86 def local(self):
87 87 return self._repo
88 88
89 89 def canpush(self):
90 90 return True
91 91
92 92 def url(self):
93 93 return self._repo.url()
94 94
95 95 def lookup(self, key):
96 96 return self._repo.lookup(key)
97 97
98 98 def branchmap(self):
99 99 return self._repo.branchmap()
100 100
101 101 def heads(self):
102 102 return self._repo.heads()
103 103
104 104 def known(self, nodes):
105 105 return self._repo.known(nodes)
106 106
107 107 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
108 108 format='HG10'):
109 109 return exchange.getbundle(self._repo, source, heads=heads,
110 110 common=common, bundlecaps=bundlecaps)
111 111
112 112 # TODO We might want to move the next two calls into legacypeer and add
113 113 # unbundle instead.
114 114
115 def unbundle(self, cg, heads, url):
116 """apply a bundle on a repo
117
118 This function handles the repo locking itself."""
119 try:
120 return exchange.unbundle(self._repo, cg, heads, 'push', url)
121 except exchange.PushRaced, exc:
122 raise error.ResponseError(_('push failed:'), exc.message)
123
115 124 def lock(self):
116 125 return self._repo.lock()
117 126
118 127 def addchangegroup(self, cg, source, url):
119 128 return changegroup.addchangegroup(self._repo, cg, source, url)
120 129
121 130 def pushkey(self, namespace, key, old, new):
122 131 return self._repo.pushkey(namespace, key, old, new)
123 132
124 133 def listkeys(self, namespace):
125 134 return self._repo.listkeys(namespace)
126 135
127 136 def debugwireargs(self, one, two, three=None, four=None, five=None):
128 137 '''used to test argument passing over the wire'''
129 138 return "%s %s %s %s %s" % (one, two, three, four, five)
130 139
131 140 class locallegacypeer(localpeer):
132 141 '''peer extension which implements legacy methods too; used for tests with
133 142 restricted capabilities'''
134 143
135 144 def __init__(self, repo):
136 145 localpeer.__init__(self, repo, caps=legacycaps)
137 146
138 147 def branches(self, nodes):
139 148 return self._repo.branches(nodes)
140 149
141 150 def between(self, pairs):
142 151 return self._repo.between(pairs)
143 152
144 153 def changegroup(self, basenodes, source):
145 154 return changegroup.changegroup(self._repo, basenodes, source)
146 155
147 156 def changegroupsubset(self, bases, heads, source):
148 157 return changegroup.changegroupsubset(self._repo, bases, heads, source)
149 158
150 159 class localrepository(object):
151 160
152 161 supportedformats = set(('revlogv1', 'generaldelta'))
153 162 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
154 163 'dotencode'))
155 164 openerreqs = set(('revlogv1', 'generaldelta'))
156 165 requirements = ['revlogv1']
157 166 filtername = None
158 167
159 168 # a list of (ui, featureset) functions.
160 169 # only functions defined in module of enabled extensions are invoked
161 170 featuresetupfuncs = set()
162 171
163 172 def _baserequirements(self, create):
164 173 return self.requirements[:]
165 174
166 175 def __init__(self, baseui, path=None, create=False):
167 176 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
168 177 self.wopener = self.wvfs
169 178 self.root = self.wvfs.base
170 179 self.path = self.wvfs.join(".hg")
171 180 self.origroot = path
172 181 self.auditor = pathutil.pathauditor(self.root, self._checknested)
173 182 self.vfs = scmutil.vfs(self.path)
174 183 self.opener = self.vfs
175 184 self.baseui = baseui
176 185 self.ui = baseui.copy()
177 186 self.ui.copy = baseui.copy # prevent copying repo configuration
178 187 # A list of callbacks to shape the phase if no data were found.
179 188 # Callbacks are in the form: func(repo, roots) --> processed root.
180 189 # This list is to be filled by extensions during repo setup
181 190 self._phasedefaults = []
182 191 try:
183 192 self.ui.readconfig(self.join("hgrc"), self.root)
184 193 extensions.loadall(self.ui)
185 194 except IOError:
186 195 pass
187 196
188 197 if self.featuresetupfuncs:
189 198 self.supported = set(self._basesupported) # use private copy
190 199 extmods = set(m.__name__ for n, m
191 200 in extensions.extensions(self.ui))
192 201 for setupfunc in self.featuresetupfuncs:
193 202 if setupfunc.__module__ in extmods:
194 203 setupfunc(self.ui, self.supported)
195 204 else:
196 205 self.supported = self._basesupported
197 206
198 207 if not self.vfs.isdir():
199 208 if create:
200 209 if not self.wvfs.exists():
201 210 self.wvfs.makedirs()
202 211 self.vfs.makedir(notindexed=True)
203 212 requirements = self._baserequirements(create)
204 213 if self.ui.configbool('format', 'usestore', True):
205 214 self.vfs.mkdir("store")
206 215 requirements.append("store")
207 216 if self.ui.configbool('format', 'usefncache', True):
208 217 requirements.append("fncache")
209 218 if self.ui.configbool('format', 'dotencode', True):
210 219 requirements.append('dotencode')
211 220 # create an invalid changelog
212 221 self.vfs.append(
213 222 "00changelog.i",
214 223 '\0\0\0\2' # represents revlogv2
215 224 ' dummy changelog to prevent using the old repo layout'
216 225 )
217 226 if self.ui.configbool('format', 'generaldelta', False):
218 227 requirements.append("generaldelta")
219 228 requirements = set(requirements)
220 229 else:
221 230 raise error.RepoError(_("repository %s not found") % path)
222 231 elif create:
223 232 raise error.RepoError(_("repository %s already exists") % path)
224 233 else:
225 234 try:
226 235 requirements = scmutil.readrequires(self.vfs, self.supported)
227 236 except IOError, inst:
228 237 if inst.errno != errno.ENOENT:
229 238 raise
230 239 requirements = set()
231 240
232 241 self.sharedpath = self.path
233 242 try:
234 243 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
235 244 realpath=True)
236 245 s = vfs.base
237 246 if not vfs.exists():
238 247 raise error.RepoError(
239 248 _('.hg/sharedpath points to nonexistent directory %s') % s)
240 249 self.sharedpath = s
241 250 except IOError, inst:
242 251 if inst.errno != errno.ENOENT:
243 252 raise
244 253
245 254 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
246 255 self.spath = self.store.path
247 256 self.svfs = self.store.vfs
248 257 self.sopener = self.svfs
249 258 self.sjoin = self.store.join
250 259 self.vfs.createmode = self.store.createmode
251 260 self._applyrequirements(requirements)
252 261 if create:
253 262 self._writerequirements()
254 263
255 264
256 265 self._branchcaches = {}
257 266 self.filterpats = {}
258 267 self._datafilters = {}
259 268 self._transref = self._lockref = self._wlockref = None
260 269
261 270 # A cache for various files under .hg/ that tracks file changes,
262 271 # (used by the filecache decorator)
263 272 #
264 273 # Maps a property name to its util.filecacheentry
265 274 self._filecache = {}
266 275
267 276 # hold sets of revision to be filtered
268 277 # should be cleared when something might have changed the filter value:
269 278 # - new changesets,
270 279 # - phase change,
271 280 # - new obsolescence marker,
272 281 # - working directory parent change,
273 282 # - bookmark changes
274 283 self.filteredrevcache = {}
275 284
276 285 def close(self):
277 286 pass
278 287
279 288 def _restrictcapabilities(self, caps):
280 289 # bundle2 is not ready for prime time, drop it unless explicitly
281 290 # required by the tests (or some brave tester)
282 291 if not self.ui.configbool('server', 'bundle2', False):
283 292 caps = set(caps)
284 293 caps.discard('bundle2')
285 294 return caps
286 295
287 296 def _applyrequirements(self, requirements):
288 297 self.requirements = requirements
289 298 self.sopener.options = dict((r, 1) for r in requirements
290 299 if r in self.openerreqs)
291 300 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
292 301 if chunkcachesize is not None:
293 302 self.sopener.options['chunkcachesize'] = chunkcachesize
294 303
295 304 def _writerequirements(self):
296 305 reqfile = self.opener("requires", "w")
297 306 for r in sorted(self.requirements):
298 307 reqfile.write("%s\n" % r)
299 308 reqfile.close()
300 309
301 310 def _checknested(self, path):
302 311 """Determine if path is a legal nested repository."""
303 312 if not path.startswith(self.root):
304 313 return False
305 314 subpath = path[len(self.root) + 1:]
306 315 normsubpath = util.pconvert(subpath)
307 316
308 317 # XXX: Checking against the current working copy is wrong in
309 318 # the sense that it can reject things like
310 319 #
311 320 # $ hg cat -r 10 sub/x.txt
312 321 #
313 322 # if sub/ is no longer a subrepository in the working copy
314 323 # parent revision.
315 324 #
316 325 # However, it can of course also allow things that would have
317 326 # been rejected before, such as the above cat command if sub/
318 327 # is a subrepository now, but was a normal directory before.
319 328 # The old path auditor would have rejected by mistake since it
320 329 # panics when it sees sub/.hg/.
321 330 #
322 331 # All in all, checking against the working copy seems sensible
323 332 # since we want to prevent access to nested repositories on
324 333 # the filesystem *now*.
325 334 ctx = self[None]
326 335 parts = util.splitpath(subpath)
327 336 while parts:
328 337 prefix = '/'.join(parts)
329 338 if prefix in ctx.substate:
330 339 if prefix == normsubpath:
331 340 return True
332 341 else:
333 342 sub = ctx.sub(prefix)
334 343 return sub.checknested(subpath[len(prefix) + 1:])
335 344 else:
336 345 parts.pop()
337 346 return False
338 347
339 348 def peer(self):
340 349 return localpeer(self) # not cached to avoid reference cycle
341 350
342 351 def unfiltered(self):
343 352 """Return unfiltered version of the repository
344 353
345 354 Intended to be overwritten by filtered repo."""
346 355 return self
347 356
348 357 def filtered(self, name):
349 358 """Return a filtered version of a repository"""
350 359 # build a new class with the mixin and the current class
351 360 # (possibly subclass of the repo)
352 361 class proxycls(repoview.repoview, self.unfiltered().__class__):
353 362 pass
354 363 return proxycls(self, name)
355 364
356 365 @repofilecache('bookmarks')
357 366 def _bookmarks(self):
358 367 return bookmarks.bmstore(self)
359 368
360 369 @repofilecache('bookmarks.current')
361 370 def _bookmarkcurrent(self):
362 371 return bookmarks.readcurrent(self)
363 372
364 373 def bookmarkheads(self, bookmark):
365 374 name = bookmark.split('@', 1)[0]
366 375 heads = []
367 376 for mark, n in self._bookmarks.iteritems():
368 377 if mark.split('@', 1)[0] == name:
369 378 heads.append(n)
370 379 return heads
371 380
372 381 @storecache('phaseroots')
373 382 def _phasecache(self):
374 383 return phases.phasecache(self, self._phasedefaults)
375 384
376 385 @storecache('obsstore')
377 386 def obsstore(self):
378 387 store = obsolete.obsstore(self.sopener)
379 388 if store and not obsolete._enabled:
380 389 # message is rare enough to not be translated
381 390 msg = 'obsolete feature not enabled but %i markers found!\n'
382 391 self.ui.warn(msg % len(list(store)))
383 392 return store
384 393
385 394 @storecache('00changelog.i')
386 395 def changelog(self):
387 396 c = changelog.changelog(self.sopener)
388 397 if 'HG_PENDING' in os.environ:
389 398 p = os.environ['HG_PENDING']
390 399 if p.startswith(self.root):
391 400 c.readpending('00changelog.i.a')
392 401 return c
393 402
394 403 @storecache('00manifest.i')
395 404 def manifest(self):
396 405 return manifest.manifest(self.sopener)
397 406
398 407 @repofilecache('dirstate')
399 408 def dirstate(self):
400 409 warned = [0]
401 410 def validate(node):
402 411 try:
403 412 self.changelog.rev(node)
404 413 return node
405 414 except error.LookupError:
406 415 if not warned[0]:
407 416 warned[0] = True
408 417 self.ui.warn(_("warning: ignoring unknown"
409 418 " working parent %s!\n") % short(node))
410 419 return nullid
411 420
412 421 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
413 422
414 423 def __getitem__(self, changeid):
415 424 if changeid is None:
416 425 return context.workingctx(self)
417 426 return context.changectx(self, changeid)
418 427
419 428 def __contains__(self, changeid):
420 429 try:
421 430 return bool(self.lookup(changeid))
422 431 except error.RepoLookupError:
423 432 return False
424 433
425 434 def __nonzero__(self):
426 435 return True
427 436
428 437 def __len__(self):
429 438 return len(self.changelog)
430 439
431 440 def __iter__(self):
432 441 return iter(self.changelog)
433 442
434 443 def revs(self, expr, *args):
435 444 '''Return a list of revisions matching the given revset'''
436 445 expr = revset.formatspec(expr, *args)
437 446 m = revset.match(None, expr)
438 447 return m(self, revset.spanset(self))
439 448
440 449 def set(self, expr, *args):
441 450 '''
442 451 Yield a context for each matching revision, after doing arg
443 452 replacement via revset.formatspec
444 453 '''
445 454 for r in self.revs(expr, *args):
446 455 yield self[r]
447 456
448 457 def url(self):
449 458 return 'file:' + self.root
450 459
451 460 def hook(self, name, throw=False, **args):
452 461 return hook.hook(self.ui, self, name, throw, **args)
453 462
454 463 @unfilteredmethod
455 464 def _tag(self, names, node, message, local, user, date, extra={}):
456 465 if isinstance(names, str):
457 466 names = (names,)
458 467
459 468 branches = self.branchmap()
460 469 for name in names:
461 470 self.hook('pretag', throw=True, node=hex(node), tag=name,
462 471 local=local)
463 472 if name in branches:
464 473 self.ui.warn(_("warning: tag %s conflicts with existing"
465 474 " branch name\n") % name)
466 475
467 476 def writetags(fp, names, munge, prevtags):
468 477 fp.seek(0, 2)
469 478 if prevtags and prevtags[-1] != '\n':
470 479 fp.write('\n')
471 480 for name in names:
472 481 m = munge and munge(name) or name
473 482 if (self._tagscache.tagtypes and
474 483 name in self._tagscache.tagtypes):
475 484 old = self.tags().get(name, nullid)
476 485 fp.write('%s %s\n' % (hex(old), m))
477 486 fp.write('%s %s\n' % (hex(node), m))
478 487 fp.close()
479 488
480 489 prevtags = ''
481 490 if local:
482 491 try:
483 492 fp = self.opener('localtags', 'r+')
484 493 except IOError:
485 494 fp = self.opener('localtags', 'a')
486 495 else:
487 496 prevtags = fp.read()
488 497
489 498 # local tags are stored in the current charset
490 499 writetags(fp, names, None, prevtags)
491 500 for name in names:
492 501 self.hook('tag', node=hex(node), tag=name, local=local)
493 502 return
494 503
495 504 try:
496 505 fp = self.wfile('.hgtags', 'rb+')
497 506 except IOError, e:
498 507 if e.errno != errno.ENOENT:
499 508 raise
500 509 fp = self.wfile('.hgtags', 'ab')
501 510 else:
502 511 prevtags = fp.read()
503 512
504 513 # committed tags are stored in UTF-8
505 514 writetags(fp, names, encoding.fromlocal, prevtags)
506 515
507 516 fp.close()
508 517
509 518 self.invalidatecaches()
510 519
511 520 if '.hgtags' not in self.dirstate:
512 521 self[None].add(['.hgtags'])
513 522
514 523 m = matchmod.exact(self.root, '', ['.hgtags'])
515 524 tagnode = self.commit(message, user, date, extra=extra, match=m)
516 525
517 526 for name in names:
518 527 self.hook('tag', node=hex(node), tag=name, local=local)
519 528
520 529 return tagnode
521 530
522 531 def tag(self, names, node, message, local, user, date):
523 532 '''tag a revision with one or more symbolic names.
524 533
525 534 names is a list of strings or, when adding a single tag, names may be a
526 535 string.
527 536
528 537 if local is True, the tags are stored in a per-repository file.
529 538 otherwise, they are stored in the .hgtags file, and a new
530 539 changeset is committed with the change.
531 540
532 541 keyword arguments:
533 542
534 543 local: whether to store tags in non-version-controlled file
535 544 (default False)
536 545
537 546 message: commit message to use if committing
538 547
539 548 user: name of user to use if committing
540 549
541 550 date: date tuple to use if committing'''
542 551
543 552 if not local:
544 553 for x in self.status()[:5]:
545 554 if '.hgtags' in x:
546 555 raise util.Abort(_('working copy of .hgtags is changed '
547 556 '(please commit .hgtags manually)'))
548 557
549 558 self.tags() # instantiate the cache
550 559 self._tag(names, node, message, local, user, date)
551 560
552 561 @filteredpropertycache
553 562 def _tagscache(self):
554 563 '''Returns a tagscache object that contains various tags related
555 564 caches.'''
556 565
557 566 # This simplifies its cache management by having one decorated
558 567 # function (this one) and the rest simply fetch things from it.
559 568 class tagscache(object):
560 569 def __init__(self):
561 570 # These two define the set of tags for this repository. tags
562 571 # maps tag name to node; tagtypes maps tag name to 'global' or
563 572 # 'local'. (Global tags are defined by .hgtags across all
564 573 # heads, and local tags are defined in .hg/localtags.)
565 574 # They constitute the in-memory cache of tags.
566 575 self.tags = self.tagtypes = None
567 576
568 577 self.nodetagscache = self.tagslist = None
569 578
570 579 cache = tagscache()
571 580 cache.tags, cache.tagtypes = self._findtags()
572 581
573 582 return cache
574 583
575 584 def tags(self):
576 585 '''return a mapping of tag to node'''
577 586 t = {}
578 587 if self.changelog.filteredrevs:
579 588 tags, tt = self._findtags()
580 589 else:
581 590 tags = self._tagscache.tags
582 591 for k, v in tags.iteritems():
583 592 try:
584 593 # ignore tags to unknown nodes
585 594 self.changelog.rev(v)
586 595 t[k] = v
587 596 except (error.LookupError, ValueError):
588 597 pass
589 598 return t
590 599
591 600 def _findtags(self):
592 601 '''Do the hard work of finding tags. Return a pair of dicts
593 602 (tags, tagtypes) where tags maps tag name to node, and tagtypes
594 603 maps tag name to a string like \'global\' or \'local\'.
595 604 Subclasses or extensions are free to add their own tags, but
596 605 should be aware that the returned dicts will be retained for the
597 606 duration of the localrepo object.'''
598 607
599 608 # XXX what tagtype should subclasses/extensions use? Currently
600 609 # mq and bookmarks add tags, but do not set the tagtype at all.
601 610 # Should each extension invent its own tag type? Should there
602 611 # be one tagtype for all such "virtual" tags? Or is the status
603 612 # quo fine?
604 613
605 614 alltags = {} # map tag name to (node, hist)
606 615 tagtypes = {}
607 616
608 617 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
609 618 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
610 619
611 620 # Build the return dicts. Have to re-encode tag names because
612 621 # the tags module always uses UTF-8 (in order not to lose info
613 622 # writing to the cache), but the rest of Mercurial wants them in
614 623 # local encoding.
615 624 tags = {}
616 625 for (name, (node, hist)) in alltags.iteritems():
617 626 if node != nullid:
618 627 tags[encoding.tolocal(name)] = node
619 628 tags['tip'] = self.changelog.tip()
620 629 tagtypes = dict([(encoding.tolocal(name), value)
621 630 for (name, value) in tagtypes.iteritems()])
622 631 return (tags, tagtypes)
623 632
624 633 def tagtype(self, tagname):
625 634 '''
626 635 return the type of the given tag. result can be:
627 636
628 637 'local' : a local tag
629 638 'global' : a global tag
630 639 None : tag does not exist
631 640 '''
632 641
633 642 return self._tagscache.tagtypes.get(tagname)
634 643
635 644 def tagslist(self):
636 645 '''return a list of tags ordered by revision'''
637 646 if not self._tagscache.tagslist:
638 647 l = []
639 648 for t, n in self.tags().iteritems():
640 649 r = self.changelog.rev(n)
641 650 l.append((r, t, n))
642 651 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
643 652
644 653 return self._tagscache.tagslist
645 654
646 655 def nodetags(self, node):
647 656 '''return the tags associated with a node'''
648 657 if not self._tagscache.nodetagscache:
649 658 nodetagscache = {}
650 659 for t, n in self._tagscache.tags.iteritems():
651 660 nodetagscache.setdefault(n, []).append(t)
652 661 for tags in nodetagscache.itervalues():
653 662 tags.sort()
654 663 self._tagscache.nodetagscache = nodetagscache
655 664 return self._tagscache.nodetagscache.get(node, [])
656 665
657 666 def nodebookmarks(self, node):
658 667 marks = []
659 668 for bookmark, n in self._bookmarks.iteritems():
660 669 if n == node:
661 670 marks.append(bookmark)
662 671 return sorted(marks)
663 672
664 673 def branchmap(self):
665 674 '''returns a dictionary {branch: [branchheads]} with branchheads
666 675 ordered by increasing revision number'''
667 676 branchmap.updatecache(self)
668 677 return self._branchcaches[self.filtername]
669 678
670 679 def branchtip(self, branch):
671 680 '''return the tip node for a given branch'''
672 681 try:
673 682 return self.branchmap().branchtip(branch)
674 683 except KeyError:
675 684 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
676 685
677 686 def lookup(self, key):
678 687 return self[key].node()
679 688
680 689 def lookupbranch(self, key, remote=None):
681 690 repo = remote or self
682 691 if key in repo.branchmap():
683 692 return key
684 693
685 694 repo = (remote and remote.local()) and remote or self
686 695 return repo[key].branch()
687 696
688 697 def known(self, nodes):
689 698 nm = self.changelog.nodemap
690 699 pc = self._phasecache
691 700 result = []
692 701 for n in nodes:
693 702 r = nm.get(n)
694 703 resp = not (r is None or pc.phase(self, r) >= phases.secret)
695 704 result.append(resp)
696 705 return result
697 706
698 707 def local(self):
699 708 return self
700 709
701 710 def cancopy(self):
702 711 # so statichttprepo's override of local() works
703 712 if not self.local():
704 713 return False
705 714 if not self.ui.configbool('phases', 'publish', True):
706 715 return True
707 716 # if publishing we can't copy if there is filtered content
708 717 return not self.filtered('visible').changelog.filteredrevs
709 718
710 719 def join(self, f):
711 720 return os.path.join(self.path, f)
712 721
713 722 def wjoin(self, f):
714 723 return os.path.join(self.root, f)
715 724
716 725 def file(self, f):
717 726 if f[0] == '/':
718 727 f = f[1:]
719 728 return filelog.filelog(self.sopener, f)
720 729
721 730 def changectx(self, changeid):
722 731 return self[changeid]
723 732
724 733 def parents(self, changeid=None):
725 734 '''get list of changectxs for parents of changeid'''
726 735 return self[changeid].parents()
727 736
728 737 def setparents(self, p1, p2=nullid):
729 738 copies = self.dirstate.setparents(p1, p2)
730 739 pctx = self[p1]
731 740 if copies:
732 741 # Adjust copy records, the dirstate cannot do it, it
733 742 # requires access to parents manifests. Preserve them
734 743 # only for entries added to first parent.
735 744 for f in copies:
736 745 if f not in pctx and copies[f] in pctx:
737 746 self.dirstate.copy(copies[f], f)
738 747 if p2 == nullid:
739 748 for f, s in sorted(self.dirstate.copies().items()):
740 749 if f not in pctx and s not in pctx:
741 750 self.dirstate.copy(None, f)
742 751
743 752 def filectx(self, path, changeid=None, fileid=None):
744 753 """changeid can be a changeset revision, node, or tag.
745 754 fileid can be a file revision or node."""
746 755 return context.filectx(self, path, changeid, fileid)
747 756
748 757 def getcwd(self):
749 758 return self.dirstate.getcwd()
750 759
751 760 def pathto(self, f, cwd=None):
752 761 return self.dirstate.pathto(f, cwd)
753 762
754 763 def wfile(self, f, mode='r'):
755 764 return self.wopener(f, mode)
756 765
757 766 def _link(self, f):
758 767 return self.wvfs.islink(f)
759 768
760 769 def _loadfilter(self, filter):
761 770 if filter not in self.filterpats:
762 771 l = []
763 772 for pat, cmd in self.ui.configitems(filter):
764 773 if cmd == '!':
765 774 continue
766 775 mf = matchmod.match(self.root, '', [pat])
767 776 fn = None
768 777 params = cmd
769 778 for name, filterfn in self._datafilters.iteritems():
770 779 if cmd.startswith(name):
771 780 fn = filterfn
772 781 params = cmd[len(name):].lstrip()
773 782 break
774 783 if not fn:
775 784 fn = lambda s, c, **kwargs: util.filter(s, c)
776 785 # Wrap old filters not supporting keyword arguments
777 786 if not inspect.getargspec(fn)[2]:
778 787 oldfn = fn
779 788 fn = lambda s, c, **kwargs: oldfn(s, c)
780 789 l.append((mf, fn, params))
781 790 self.filterpats[filter] = l
782 791 return self.filterpats[filter]
783 792
784 793 def _filter(self, filterpats, filename, data):
785 794 for mf, fn, cmd in filterpats:
786 795 if mf(filename):
787 796 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
788 797 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
789 798 break
790 799
791 800 return data
792 801
793 802 @unfilteredpropertycache
794 803 def _encodefilterpats(self):
795 804 return self._loadfilter('encode')
796 805
797 806 @unfilteredpropertycache
798 807 def _decodefilterpats(self):
799 808 return self._loadfilter('decode')
800 809
801 810 def adddatafilter(self, name, filter):
802 811 self._datafilters[name] = filter
803 812
804 813 def wread(self, filename):
805 814 if self._link(filename):
806 815 data = self.wvfs.readlink(filename)
807 816 else:
808 817 data = self.wopener.read(filename)
809 818 return self._filter(self._encodefilterpats, filename, data)
810 819
811 820 def wwrite(self, filename, data, flags):
812 821 data = self._filter(self._decodefilterpats, filename, data)
813 822 if 'l' in flags:
814 823 self.wopener.symlink(data, filename)
815 824 else:
816 825 self.wopener.write(filename, data)
817 826 if 'x' in flags:
818 827 self.wvfs.setflags(filename, False, True)
819 828
820 829 def wwritedata(self, filename, data):
821 830 return self._filter(self._decodefilterpats, filename, data)
822 831
823 832 def transaction(self, desc, report=None):
824 833 tr = self._transref and self._transref() or None
825 834 if tr and tr.running():
826 835 return tr.nest()
827 836
828 837 # abort here if the journal already exists
829 838 if self.svfs.exists("journal"):
830 839 raise error.RepoError(
831 840 _("abandoned transaction found - run hg recover"))
832 841
833 842 def onclose():
834 843 self.store.write(tr)
835 844
836 845 self._writejournal(desc)
837 846 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
838 847 rp = report and report or self.ui.warn
839 848 tr = transaction.transaction(rp, self.sopener,
840 849 "journal",
841 850 aftertrans(renames),
842 851 self.store.createmode,
843 852 onclose)
844 853 self._transref = weakref.ref(tr)
845 854 return tr
846 855
847 856 def _journalfiles(self):
848 857 return ((self.svfs, 'journal'),
849 858 (self.vfs, 'journal.dirstate'),
850 859 (self.vfs, 'journal.branch'),
851 860 (self.vfs, 'journal.desc'),
852 861 (self.vfs, 'journal.bookmarks'),
853 862 (self.svfs, 'journal.phaseroots'))
854 863
855 864 def undofiles(self):
856 865 return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
857 866
858 867 def _writejournal(self, desc):
859 868 self.opener.write("journal.dirstate",
860 869 self.opener.tryread("dirstate"))
861 870 self.opener.write("journal.branch",
862 871 encoding.fromlocal(self.dirstate.branch()))
863 872 self.opener.write("journal.desc",
864 873 "%d\n%s\n" % (len(self), desc))
865 874 self.opener.write("journal.bookmarks",
866 875 self.opener.tryread("bookmarks"))
867 876 self.sopener.write("journal.phaseroots",
868 877 self.sopener.tryread("phaseroots"))
869 878
870 879 def recover(self):
871 880 lock = self.lock()
872 881 try:
873 882 if self.svfs.exists("journal"):
874 883 self.ui.status(_("rolling back interrupted transaction\n"))
875 884 transaction.rollback(self.sopener, "journal",
876 885 self.ui.warn)
877 886 self.invalidate()
878 887 return True
879 888 else:
880 889 self.ui.warn(_("no interrupted transaction available\n"))
881 890 return False
882 891 finally:
883 892 lock.release()
884 893
885 894 def rollback(self, dryrun=False, force=False):
886 895 wlock = lock = None
887 896 try:
888 897 wlock = self.wlock()
889 898 lock = self.lock()
890 899 if self.svfs.exists("undo"):
891 900 return self._rollback(dryrun, force)
892 901 else:
893 902 self.ui.warn(_("no rollback information available\n"))
894 903 return 1
895 904 finally:
896 905 release(lock, wlock)
897 906
898 907 @unfilteredmethod # Until we get smarter cache management
899 908 def _rollback(self, dryrun, force):
900 909 ui = self.ui
901 910 try:
902 911 args = self.opener.read('undo.desc').splitlines()
903 912 (oldlen, desc, detail) = (int(args[0]), args[1], None)
904 913 if len(args) >= 3:
905 914 detail = args[2]
906 915 oldtip = oldlen - 1
907 916
908 917 if detail and ui.verbose:
909 918 msg = (_('repository tip rolled back to revision %s'
910 919 ' (undo %s: %s)\n')
911 920 % (oldtip, desc, detail))
912 921 else:
913 922 msg = (_('repository tip rolled back to revision %s'
914 923 ' (undo %s)\n')
915 924 % (oldtip, desc))
916 925 except IOError:
917 926 msg = _('rolling back unknown transaction\n')
918 927 desc = None
919 928
920 929 if not force and self['.'] != self['tip'] and desc == 'commit':
921 930 raise util.Abort(
922 931 _('rollback of last commit while not checked out '
923 932 'may lose data'), hint=_('use -f to force'))
924 933
925 934 ui.status(msg)
926 935 if dryrun:
927 936 return 0
928 937
929 938 parents = self.dirstate.parents()
930 939 self.destroying()
931 940 transaction.rollback(self.sopener, 'undo', ui.warn)
932 941 if self.vfs.exists('undo.bookmarks'):
933 942 self.vfs.rename('undo.bookmarks', 'bookmarks')
934 943 if self.svfs.exists('undo.phaseroots'):
935 944 self.svfs.rename('undo.phaseroots', 'phaseroots')
936 945 self.invalidate()
937 946
938 947 parentgone = (parents[0] not in self.changelog.nodemap or
939 948 parents[1] not in self.changelog.nodemap)
940 949 if parentgone:
941 950 self.vfs.rename('undo.dirstate', 'dirstate')
942 951 try:
943 952 branch = self.opener.read('undo.branch')
944 953 self.dirstate.setbranch(encoding.tolocal(branch))
945 954 except IOError:
946 955 ui.warn(_('named branch could not be reset: '
947 956 'current branch is still \'%s\'\n')
948 957 % self.dirstate.branch())
949 958
950 959 self.dirstate.invalidate()
951 960 parents = tuple([p.rev() for p in self.parents()])
952 961 if len(parents) > 1:
953 962 ui.status(_('working directory now based on '
954 963 'revisions %d and %d\n') % parents)
955 964 else:
956 965 ui.status(_('working directory now based on '
957 966 'revision %d\n') % parents)
958 967 # TODO: if we know which new heads may result from this rollback, pass
959 968 # them to destroy(), which will prevent the branchhead cache from being
960 969 # invalidated.
961 970 self.destroyed()
962 971 return 0
963 972
964 973 def invalidatecaches(self):
965 974
966 975 if '_tagscache' in vars(self):
967 976 # can't use delattr on proxy
968 977 del self.__dict__['_tagscache']
969 978
970 979 self.unfiltered()._branchcaches.clear()
971 980 self.invalidatevolatilesets()
972 981
973 982 def invalidatevolatilesets(self):
974 983 self.filteredrevcache.clear()
975 984 obsolete.clearobscaches(self)
976 985
977 986 def invalidatedirstate(self):
978 987 '''Invalidates the dirstate, causing the next call to dirstate
979 988 to check if it was modified since the last time it was read,
980 989 rereading it if it has.
981 990
982 991 This is different from dirstate.invalidate() in that it doesn't always
983 992 reread the dirstate. Use dirstate.invalidate() if you want to
984 993 explicitly read the dirstate again (i.e. restoring it to a previous
985 994 known good state).'''
986 995 if hasunfilteredcache(self, 'dirstate'):
987 996 for k in self.dirstate._filecache:
988 997 try:
989 998 delattr(self.dirstate, k)
990 999 except AttributeError:
991 1000 pass
992 1001 delattr(self.unfiltered(), 'dirstate')
993 1002
994 1003 def invalidate(self):
995 1004 unfiltered = self.unfiltered() # all file caches are stored unfiltered
996 1005 for k in self._filecache:
997 1006 # dirstate is invalidated separately in invalidatedirstate()
998 1007 if k == 'dirstate':
999 1008 continue
1000 1009
1001 1010 try:
1002 1011 delattr(unfiltered, k)
1003 1012 except AttributeError:
1004 1013 pass
1005 1014 self.invalidatecaches()
1006 1015 self.store.invalidatecaches()
1007 1016
1008 1017 def invalidateall(self):
1009 1018 '''Fully invalidates both store and non-store parts, causing the
1010 1019 subsequent operation to reread any outside changes.'''
1011 1020 # extension should hook this to invalidate its caches
1012 1021 self.invalidate()
1013 1022 self.invalidatedirstate()
1014 1023
1015 1024 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1016 1025 try:
1017 1026 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1018 1027 except error.LockHeld, inst:
1019 1028 if not wait:
1020 1029 raise
1021 1030 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1022 1031 (desc, inst.locker))
1023 1032 # default to 600 seconds timeout
1024 1033 l = lockmod.lock(vfs, lockname,
1025 1034 int(self.ui.config("ui", "timeout", "600")),
1026 1035 releasefn, desc=desc)
1027 1036 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1028 1037 if acquirefn:
1029 1038 acquirefn()
1030 1039 return l
1031 1040
1032 1041 def _afterlock(self, callback):
1033 1042 """add a callback to the current repository lock.
1034 1043
1035 1044 The callback will be executed on lock release."""
1036 1045 l = self._lockref and self._lockref()
1037 1046 if l:
1038 1047 l.postrelease.append(callback)
1039 1048 else:
1040 1049 callback()
1041 1050
1042 1051 def lock(self, wait=True):
1043 1052 '''Lock the repository store (.hg/store) and return a weak reference
1044 1053 to the lock. Use this before modifying the store (e.g. committing or
1045 1054 stripping). If you are opening a transaction, get a lock as well.'''
1046 1055 l = self._lockref and self._lockref()
1047 1056 if l is not None and l.held:
1048 1057 l.lock()
1049 1058 return l
1050 1059
1051 1060 def unlock():
1052 1061 if hasunfilteredcache(self, '_phasecache'):
1053 1062 self._phasecache.write()
1054 1063 for k, ce in self._filecache.items():
1055 1064 if k == 'dirstate' or k not in self.__dict__:
1056 1065 continue
1057 1066 ce.refresh()
1058 1067
1059 1068 l = self._lock(self.svfs, "lock", wait, unlock,
1060 1069 self.invalidate, _('repository %s') % self.origroot)
1061 1070 self._lockref = weakref.ref(l)
1062 1071 return l
1063 1072
1064 1073 def wlock(self, wait=True):
1065 1074 '''Lock the non-store parts of the repository (everything under
1066 1075 .hg except .hg/store) and return a weak reference to the lock.
1067 1076 Use this before modifying files in .hg.'''
1068 1077 l = self._wlockref and self._wlockref()
1069 1078 if l is not None and l.held:
1070 1079 l.lock()
1071 1080 return l
1072 1081
1073 1082 def unlock():
1074 1083 self.dirstate.write()
1075 1084 self._filecache['dirstate'].refresh()
1076 1085
1077 1086 l = self._lock(self.vfs, "wlock", wait, unlock,
1078 1087 self.invalidatedirstate, _('working directory of %s') %
1079 1088 self.origroot)
1080 1089 self._wlockref = weakref.ref(l)
1081 1090 return l
1082 1091
1083 1092 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1084 1093 """
1085 1094 commit an individual file as part of a larger transaction
1086 1095 """
1087 1096
1088 1097 fname = fctx.path()
1089 1098 text = fctx.data()
1090 1099 flog = self.file(fname)
1091 1100 fparent1 = manifest1.get(fname, nullid)
1092 1101 fparent2 = fparent2o = manifest2.get(fname, nullid)
1093 1102
1094 1103 meta = {}
1095 1104 copy = fctx.renamed()
1096 1105 if copy and copy[0] != fname:
1097 1106 # Mark the new revision of this file as a copy of another
1098 1107 # file. This copy data will effectively act as a parent
1099 1108 # of this new revision. If this is a merge, the first
1100 1109 # parent will be the nullid (meaning "look up the copy data")
1101 1110 # and the second one will be the other parent. For example:
1102 1111 #
1103 1112 # 0 --- 1 --- 3 rev1 changes file foo
1104 1113 # \ / rev2 renames foo to bar and changes it
1105 1114 # \- 2 -/ rev3 should have bar with all changes and
1106 1115 # should record that bar descends from
1107 1116 # bar in rev2 and foo in rev1
1108 1117 #
1109 1118 # this allows this merge to succeed:
1110 1119 #
1111 1120 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1112 1121 # \ / merging rev3 and rev4 should use bar@rev2
1113 1122 # \- 2 --- 4 as the merge base
1114 1123 #
1115 1124
1116 1125 cfname = copy[0]
1117 1126 crev = manifest1.get(cfname)
1118 1127 newfparent = fparent2
1119 1128
1120 1129 if manifest2: # branch merge
1121 1130 if fparent2 == nullid or crev is None: # copied on remote side
1122 1131 if cfname in manifest2:
1123 1132 crev = manifest2[cfname]
1124 1133 newfparent = fparent1
1125 1134
1126 1135 # find source in nearest ancestor if we've lost track
1127 1136 if not crev:
1128 1137 self.ui.debug(" %s: searching for copy revision for %s\n" %
1129 1138 (fname, cfname))
1130 1139 for ancestor in self[None].ancestors():
1131 1140 if cfname in ancestor:
1132 1141 crev = ancestor[cfname].filenode()
1133 1142 break
1134 1143
1135 1144 if crev:
1136 1145 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1137 1146 meta["copy"] = cfname
1138 1147 meta["copyrev"] = hex(crev)
1139 1148 fparent1, fparent2 = nullid, newfparent
1140 1149 else:
1141 1150 self.ui.warn(_("warning: can't find ancestor for '%s' "
1142 1151 "copied from '%s'!\n") % (fname, cfname))
1143 1152
1144 1153 elif fparent1 == nullid:
1145 1154 fparent1, fparent2 = fparent2, nullid
1146 1155 elif fparent2 != nullid:
1147 1156 # is one parent an ancestor of the other?
1148 1157 fparentancestor = flog.ancestor(fparent1, fparent2)
1149 1158 if fparentancestor == fparent1:
1150 1159 fparent1, fparent2 = fparent2, nullid
1151 1160 elif fparentancestor == fparent2:
1152 1161 fparent2 = nullid
1153 1162
1154 1163 # is the file changed?
1155 1164 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1156 1165 changelist.append(fname)
1157 1166 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1158 1167
1159 1168 # are just the flags changed during merge?
1160 1169 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1161 1170 changelist.append(fname)
1162 1171
1163 1172 return fparent1
1164 1173
1165 1174 @unfilteredmethod
1166 1175 def commit(self, text="", user=None, date=None, match=None, force=False,
1167 1176 editor=False, extra={}):
1168 1177 """Add a new revision to current repository.
1169 1178
1170 1179 Revision information is gathered from the working directory,
1171 1180 match can be used to filter the committed files. If editor is
1172 1181 supplied, it is called to get a commit message.
1173 1182 """
1174 1183
1175 1184 def fail(f, msg):
1176 1185 raise util.Abort('%s: %s' % (f, msg))
1177 1186
1178 1187 if not match:
1179 1188 match = matchmod.always(self.root, '')
1180 1189
1181 1190 if not force:
1182 1191 vdirs = []
1183 1192 match.explicitdir = vdirs.append
1184 1193 match.bad = fail
1185 1194
1186 1195 wlock = self.wlock()
1187 1196 try:
1188 1197 wctx = self[None]
1189 1198 merge = len(wctx.parents()) > 1
1190 1199
1191 1200 if (not force and merge and match and
1192 1201 (match.files() or match.anypats())):
1193 1202 raise util.Abort(_('cannot partially commit a merge '
1194 1203 '(do not specify files or patterns)'))
1195 1204
1196 1205 changes = self.status(match=match, clean=force)
1197 1206 if force:
1198 1207 changes[0].extend(changes[6]) # mq may commit unchanged files
1199 1208
1200 1209 # check subrepos
1201 1210 subs = []
1202 1211 commitsubs = set()
1203 1212 newstate = wctx.substate.copy()
1204 1213 # only manage subrepos and .hgsubstate if .hgsub is present
1205 1214 if '.hgsub' in wctx:
1206 1215 # we'll decide whether to track this ourselves, thanks
1207 1216 for c in changes[:3]:
1208 1217 if '.hgsubstate' in c:
1209 1218 c.remove('.hgsubstate')
1210 1219
1211 1220 # compare current state to last committed state
1212 1221 # build new substate based on last committed state
1213 1222 oldstate = wctx.p1().substate
1214 1223 for s in sorted(newstate.keys()):
1215 1224 if not match(s):
1216 1225 # ignore working copy, use old state if present
1217 1226 if s in oldstate:
1218 1227 newstate[s] = oldstate[s]
1219 1228 continue
1220 1229 if not force:
1221 1230 raise util.Abort(
1222 1231 _("commit with new subrepo %s excluded") % s)
1223 1232 if wctx.sub(s).dirty(True):
1224 1233 if not self.ui.configbool('ui', 'commitsubrepos'):
1225 1234 raise util.Abort(
1226 1235 _("uncommitted changes in subrepo %s") % s,
1227 1236 hint=_("use --subrepos for recursive commit"))
1228 1237 subs.append(s)
1229 1238 commitsubs.add(s)
1230 1239 else:
1231 1240 bs = wctx.sub(s).basestate()
1232 1241 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1233 1242 if oldstate.get(s, (None, None, None))[1] != bs:
1234 1243 subs.append(s)
1235 1244
1236 1245 # check for removed subrepos
1237 1246 for p in wctx.parents():
1238 1247 r = [s for s in p.substate if s not in newstate]
1239 1248 subs += [s for s in r if match(s)]
1240 1249 if subs:
1241 1250 if (not match('.hgsub') and
1242 1251 '.hgsub' in (wctx.modified() + wctx.added())):
1243 1252 raise util.Abort(
1244 1253 _("can't commit subrepos without .hgsub"))
1245 1254 changes[0].insert(0, '.hgsubstate')
1246 1255
1247 1256 elif '.hgsub' in changes[2]:
1248 1257 # clean up .hgsubstate when .hgsub is removed
1249 1258 if ('.hgsubstate' in wctx and
1250 1259 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1251 1260 changes[2].insert(0, '.hgsubstate')
1252 1261
1253 1262 # make sure all explicit patterns are matched
1254 1263 if not force and match.files():
1255 1264 matched = set(changes[0] + changes[1] + changes[2])
1256 1265
1257 1266 for f in match.files():
1258 1267 f = self.dirstate.normalize(f)
1259 1268 if f == '.' or f in matched or f in wctx.substate:
1260 1269 continue
1261 1270 if f in changes[3]: # missing
1262 1271 fail(f, _('file not found!'))
1263 1272 if f in vdirs: # visited directory
1264 1273 d = f + '/'
1265 1274 for mf in matched:
1266 1275 if mf.startswith(d):
1267 1276 break
1268 1277 else:
1269 1278 fail(f, _("no match under directory!"))
1270 1279 elif f not in self.dirstate:
1271 1280 fail(f, _("file not tracked!"))
1272 1281
1273 1282 cctx = context.workingctx(self, text, user, date, extra, changes)
1274 1283
1275 1284 if (not force and not extra.get("close") and not merge
1276 1285 and not cctx.files()
1277 1286 and wctx.branch() == wctx.p1().branch()):
1278 1287 return None
1279 1288
1280 1289 if merge and cctx.deleted():
1281 1290 raise util.Abort(_("cannot commit merge with missing files"))
1282 1291
1283 1292 ms = mergemod.mergestate(self)
1284 1293 for f in changes[0]:
1285 1294 if f in ms and ms[f] == 'u':
1286 1295 raise util.Abort(_("unresolved merge conflicts "
1287 1296 "(see hg help resolve)"))
1288 1297
1289 1298 if editor:
1290 1299 cctx._text = editor(self, cctx, subs)
1291 1300 edited = (text != cctx._text)
1292 1301
1293 1302 # Save commit message in case this transaction gets rolled back
1294 1303 # (e.g. by a pretxncommit hook). Leave the content alone on
1295 1304 # the assumption that the user will use the same editor again.
1296 1305 msgfn = self.savecommitmessage(cctx._text)
1297 1306
1298 1307 # commit subs and write new state
1299 1308 if subs:
1300 1309 for s in sorted(commitsubs):
1301 1310 sub = wctx.sub(s)
1302 1311 self.ui.status(_('committing subrepository %s\n') %
1303 1312 subrepo.subrelpath(sub))
1304 1313 sr = sub.commit(cctx._text, user, date)
1305 1314 newstate[s] = (newstate[s][0], sr)
1306 1315 subrepo.writestate(self, newstate)
1307 1316
1308 1317 p1, p2 = self.dirstate.parents()
1309 1318 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1310 1319 try:
1311 1320 self.hook("precommit", throw=True, parent1=hookp1,
1312 1321 parent2=hookp2)
1313 1322 ret = self.commitctx(cctx, True)
1314 1323 except: # re-raises
1315 1324 if edited:
1316 1325 self.ui.write(
1317 1326 _('note: commit message saved in %s\n') % msgfn)
1318 1327 raise
1319 1328
1320 1329 # update bookmarks, dirstate and mergestate
1321 1330 bookmarks.update(self, [p1, p2], ret)
1322 1331 cctx.markcommitted(ret)
1323 1332 ms.reset()
1324 1333 finally:
1325 1334 wlock.release()
1326 1335
1327 1336 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1328 1337 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1329 1338 self._afterlock(commithook)
1330 1339 return ret
1331 1340
1332 1341 @unfilteredmethod
1333 1342 def commitctx(self, ctx, error=False):
1334 1343 """Add a new revision to current repository.
1335 1344 Revision information is passed via the context argument.
1336 1345 """
1337 1346
1338 1347 tr = lock = None
1339 1348 removed = list(ctx.removed())
1340 1349 p1, p2 = ctx.p1(), ctx.p2()
1341 1350 user = ctx.user()
1342 1351
1343 1352 lock = self.lock()
1344 1353 try:
1345 1354 tr = self.transaction("commit")
1346 1355 trp = weakref.proxy(tr)
1347 1356
1348 1357 if ctx.files():
1349 1358 m1 = p1.manifest().copy()
1350 1359 m2 = p2.manifest()
1351 1360
1352 1361 # check in files
1353 1362 new = {}
1354 1363 changed = []
1355 1364 linkrev = len(self)
1356 1365 for f in sorted(ctx.modified() + ctx.added()):
1357 1366 self.ui.note(f + "\n")
1358 1367 try:
1359 1368 fctx = ctx[f]
1360 1369 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1361 1370 changed)
1362 1371 m1.set(f, fctx.flags())
1363 1372 except OSError, inst:
1364 1373 self.ui.warn(_("trouble committing %s!\n") % f)
1365 1374 raise
1366 1375 except IOError, inst:
1367 1376 errcode = getattr(inst, 'errno', errno.ENOENT)
1368 1377 if error or errcode and errcode != errno.ENOENT:
1369 1378 self.ui.warn(_("trouble committing %s!\n") % f)
1370 1379 raise
1371 1380 else:
1372 1381 removed.append(f)
1373 1382
1374 1383 # update manifest
1375 1384 m1.update(new)
1376 1385 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1377 1386 drop = [f for f in removed if f in m1]
1378 1387 for f in drop:
1379 1388 del m1[f]
1380 1389 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1381 1390 p2.manifestnode(), (new, drop))
1382 1391 files = changed + removed
1383 1392 else:
1384 1393 mn = p1.manifestnode()
1385 1394 files = []
1386 1395
1387 1396 # update changelog
1388 1397 self.changelog.delayupdate()
1389 1398 n = self.changelog.add(mn, files, ctx.description(),
1390 1399 trp, p1.node(), p2.node(),
1391 1400 user, ctx.date(), ctx.extra().copy())
1392 1401 p = lambda: self.changelog.writepending() and self.root or ""
1393 1402 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1394 1403 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1395 1404 parent2=xp2, pending=p)
1396 1405 self.changelog.finalize(trp)
1397 1406 # set the new commit in its proper phase
1398 1407 targetphase = subrepo.newcommitphase(self.ui, ctx)
1399 1408 if targetphase:
1400 1409 # retracting the boundary does not alter parent changesets.
1401 1410 # if a parent has a higher phase, the resulting phase will
1402 1411 # be compliant anyway
1403 1412 #
1404 1413 # if minimal phase was 0 we don't need to retract anything
1405 1414 phases.retractboundary(self, targetphase, [n])
1406 1415 tr.close()
1407 1416 branchmap.updatecache(self.filtered('served'))
1408 1417 return n
1409 1418 finally:
1410 1419 if tr:
1411 1420 tr.release()
1412 1421 lock.release()
1413 1422
1414 1423 @unfilteredmethod
1415 1424 def destroying(self):
1416 1425 '''Inform the repository that nodes are about to be destroyed.
1417 1426 Intended for use by strip and rollback, so there's a common
1418 1427 place for anything that has to be done before destroying history.
1419 1428
1420 1429 This is mostly useful for saving state that is in memory and waiting
1421 1430 to be flushed when the current lock is released. Because a call to
1422 1431 destroyed is imminent, the repo will be invalidated causing those
1423 1432 changes to stay in memory (waiting for the next unlock), or vanish
1424 1433 completely.
1425 1434 '''
1426 1435 # When using the same lock to commit and strip, the phasecache is left
1427 1436 # dirty after committing. Then when we strip, the repo is invalidated,
1428 1437 # causing those changes to disappear.
1429 1438 if '_phasecache' in vars(self):
1430 1439 self._phasecache.write()
1431 1440
1432 1441 @unfilteredmethod
1433 1442 def destroyed(self):
1434 1443 '''Inform the repository that nodes have been destroyed.
1435 1444 Intended for use by strip and rollback, so there's a common
1436 1445 place for anything that has to be done after destroying history.
1437 1446 '''
1438 1447 # When one tries to:
1439 1448 # 1) destroy nodes thus calling this method (e.g. strip)
1440 1449 # 2) use phasecache somewhere (e.g. commit)
1441 1450 #
1442 1451 # then 2) will fail because the phasecache contains nodes that were
1443 1452 # removed. We can either remove phasecache from the filecache,
1444 1453 # causing it to reload next time it is accessed, or simply filter
1445 1454 # the removed nodes now and write the updated cache.
1446 1455 self._phasecache.filterunknown(self)
1447 1456 self._phasecache.write()
1448 1457
1449 1458 # update the 'served' branch cache to help read-only server processes
1450 1459 # Thanks to branchcache collaboration this is done from the nearest
1451 1460 # filtered subset and it is expected to be fast.
1452 1461 branchmap.updatecache(self.filtered('served'))
1453 1462
1454 1463 # Ensure the persistent tag cache is updated. Doing it now
1455 1464 # means that the tag cache only has to worry about destroyed
1456 1465 # heads immediately after a strip/rollback. That in turn
1457 1466 # guarantees that "cachetip == currenttip" (comparing both rev
1458 1467 # and node) always means no nodes have been added or destroyed.
1459 1468
1460 1469 # XXX this is suboptimal when qrefresh'ing: we strip the current
1461 1470 # head, refresh the tag cache, then immediately add a new head.
1462 1471 # But I think doing it this way is necessary for the "instant
1463 1472 # tag cache retrieval" case to work.
1464 1473 self.invalidate()
1465 1474
1466 1475 def walk(self, match, node=None):
1467 1476 '''
1468 1477 walk recursively through the directory tree or a given
1469 1478 changeset, finding all files matched by the match
1470 1479 function
1471 1480 '''
1472 1481 return self[node].walk(match)
1473 1482
1474 1483 def status(self, node1='.', node2=None, match=None,
1475 1484 ignored=False, clean=False, unknown=False,
1476 1485 listsubrepos=False):
1477 1486 """return status of files between two nodes or node and working
1478 1487 directory.
1479 1488
1480 1489 If node1 is None, use the first dirstate parent instead.
1481 1490 If node2 is None, compare node1 with working directory.
1482 1491 """
1483 1492
1484 1493 def mfmatches(ctx):
1485 1494 mf = ctx.manifest().copy()
1486 1495 if match.always():
1487 1496 return mf
1488 1497 for fn in mf.keys():
1489 1498 if not match(fn):
1490 1499 del mf[fn]
1491 1500 return mf
1492 1501
1493 1502 ctx1 = self[node1]
1494 1503 ctx2 = self[node2]
1495 1504
1496 1505 working = ctx2.rev() is None
1497 1506 parentworking = working and ctx1 == self['.']
1498 1507 match = match or matchmod.always(self.root, self.getcwd())
1499 1508 listignored, listclean, listunknown = ignored, clean, unknown
1500 1509
1501 1510 # load earliest manifest first for caching reasons
1502 1511 if not working and ctx2.rev() < ctx1.rev():
1503 1512 ctx2.manifest()
1504 1513
1505 1514 if not parentworking:
1506 1515 def bad(f, msg):
1507 1516 # 'f' may be a directory pattern from 'match.files()',
1508 1517 # so 'f not in ctx1' is not enough
1509 1518 if f not in ctx1 and f not in ctx1.dirs():
1510 1519 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1511 1520 match.bad = bad
1512 1521
1513 1522 if working: # we need to scan the working dir
1514 1523 subrepos = []
1515 1524 if '.hgsub' in self.dirstate:
1516 1525 subrepos = sorted(ctx2.substate)
1517 1526 s = self.dirstate.status(match, subrepos, listignored,
1518 1527 listclean, listunknown)
1519 1528 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1520 1529
1521 1530 # check for any possibly clean files
1522 1531 if parentworking and cmp:
1523 1532 fixup = []
1524 1533 # do a full compare of any files that might have changed
1525 1534 for f in sorted(cmp):
1526 1535 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1527 1536 or ctx1[f].cmp(ctx2[f])):
1528 1537 modified.append(f)
1529 1538 else:
1530 1539 fixup.append(f)
1531 1540
1532 1541 # update dirstate for files that are actually clean
1533 1542 if fixup:
1534 1543 if listclean:
1535 1544 clean += fixup
1536 1545
1537 1546 try:
1538 1547 # updating the dirstate is optional
1539 1548 # so we don't wait on the lock
1540 1549 wlock = self.wlock(False)
1541 1550 try:
1542 1551 for f in fixup:
1543 1552 self.dirstate.normal(f)
1544 1553 finally:
1545 1554 wlock.release()
1546 1555 except error.LockError:
1547 1556 pass
1548 1557
1549 1558 if not parentworking:
1550 1559 mf1 = mfmatches(ctx1)
1551 1560 if working:
1552 1561 # we are comparing working dir against non-parent
1553 1562 # generate a pseudo-manifest for the working dir
1554 1563 mf2 = mfmatches(self['.'])
1555 1564 for f in cmp + modified + added:
1556 1565 mf2[f] = None
1557 1566 mf2.set(f, ctx2.flags(f))
1558 1567 for f in removed:
1559 1568 if f in mf2:
1560 1569 del mf2[f]
1561 1570 else:
1562 1571 # we are comparing two revisions
1563 1572 deleted, unknown, ignored = [], [], []
1564 1573 mf2 = mfmatches(ctx2)
1565 1574
1566 1575 modified, added, clean = [], [], []
1567 1576 withflags = mf1.withflags() | mf2.withflags()
1568 1577 for fn, mf2node in mf2.iteritems():
1569 1578 if fn in mf1:
1570 1579 if (fn not in deleted and
1571 1580 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1572 1581 (mf1[fn] != mf2node and
1573 1582 (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
1574 1583 modified.append(fn)
1575 1584 elif listclean:
1576 1585 clean.append(fn)
1577 1586 del mf1[fn]
1578 1587 elif fn not in deleted:
1579 1588 added.append(fn)
1580 1589 removed = mf1.keys()
1581 1590
1582 1591 if working and modified and not self.dirstate._checklink:
1583 1592 # Symlink placeholders may get non-symlink-like contents
1584 1593 # via user error or dereferencing by NFS or Samba servers,
1585 1594 # so we filter out any placeholders that don't look like a
1586 1595 # symlink
1587 1596 sane = []
1588 1597 for f in modified:
1589 1598 if ctx2.flags(f) == 'l':
1590 1599 d = ctx2[f].data()
1591 1600 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1592 1601 self.ui.debug('ignoring suspect symlink placeholder'
1593 1602 ' "%s"\n' % f)
1594 1603 continue
1595 1604 sane.append(f)
1596 1605 modified = sane
1597 1606
1598 1607 r = modified, added, removed, deleted, unknown, ignored, clean
1599 1608
1600 1609 if listsubrepos:
1601 1610 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1602 1611 if working:
1603 1612 rev2 = None
1604 1613 else:
1605 1614 rev2 = ctx2.substate[subpath][1]
1606 1615 try:
1607 1616 submatch = matchmod.narrowmatcher(subpath, match)
1608 1617 s = sub.status(rev2, match=submatch, ignored=listignored,
1609 1618 clean=listclean, unknown=listunknown,
1610 1619 listsubrepos=True)
1611 1620 for rfiles, sfiles in zip(r, s):
1612 1621 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1613 1622 except error.LookupError:
1614 1623 self.ui.status(_("skipping missing subrepository: %s\n")
1615 1624 % subpath)
1616 1625
1617 1626 for l in r:
1618 1627 l.sort()
1619 1628 return r
1620 1629
1621 1630 def heads(self, start=None):
1622 1631 heads = self.changelog.heads(start)
1623 1632 # sort the output in rev descending order
1624 1633 return sorted(heads, key=self.changelog.rev, reverse=True)
1625 1634
1626 1635 def branchheads(self, branch=None, start=None, closed=False):
1627 1636 '''return a (possibly filtered) list of heads for the given branch
1628 1637
1629 1638 Heads are returned in topological order, from newest to oldest.
1630 1639 If branch is None, use the dirstate branch.
1631 1640 If start is not None, return only heads reachable from start.
1632 1641 If closed is True, return heads that are marked as closed as well.
1633 1642 '''
1634 1643 if branch is None:
1635 1644 branch = self[None].branch()
1636 1645 branches = self.branchmap()
1637 1646 if branch not in branches:
1638 1647 return []
1639 1648 # the cache returns heads ordered lowest to highest
1640 1649 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1641 1650 if start is not None:
1642 1651 # filter out the heads that cannot be reached from startrev
1643 1652 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1644 1653 bheads = [h for h in bheads if h in fbheads]
1645 1654 return bheads
1646 1655
1647 1656 def branches(self, nodes):
1648 1657 if not nodes:
1649 1658 nodes = [self.changelog.tip()]
1650 1659 b = []
1651 1660 for n in nodes:
1652 1661 t = n
1653 1662 while True:
1654 1663 p = self.changelog.parents(n)
1655 1664 if p[1] != nullid or p[0] == nullid:
1656 1665 b.append((t, n, p[0], p[1]))
1657 1666 break
1658 1667 n = p[0]
1659 1668 return b
1660 1669
1661 1670 def between(self, pairs):
1662 1671 r = []
1663 1672
1664 1673 for top, bottom in pairs:
1665 1674 n, l, i = top, [], 0
1666 1675 f = 1
1667 1676
1668 1677 while n != bottom and n != nullid:
1669 1678 p = self.changelog.parents(n)[0]
1670 1679 if i == f:
1671 1680 l.append(n)
1672 1681 f = f * 2
1673 1682 n = p
1674 1683 i += 1
1675 1684
1676 1685 r.append(l)
1677 1686
1678 1687 return r
1679 1688
1680 1689 def pull(self, remote, heads=None, force=False):
1681 1690 return exchange.pull(self, remote, heads, force)
1682 1691
1683 1692 def checkpush(self, pushop):
1684 1693 """Extensions can override this function if additional checks have
1685 1694 to be performed before pushing, or call it if they override push
1686 1695 command.
1687 1696 """
1688 1697 pass
1689 1698
1690 1699 def push(self, remote, force=False, revs=None, newbranch=False):
1691 1700 return exchange.push(self, remote, force, revs, newbranch)
1692 1701
1693 1702 def stream_in(self, remote, requirements):
1694 1703 lock = self.lock()
1695 1704 try:
1696 1705 # Save remote branchmap. We will use it later
1697 1706 # to speed up branchcache creation
1698 1707 rbranchmap = None
1699 1708 if remote.capable("branchmap"):
1700 1709 rbranchmap = remote.branchmap()
1701 1710
1702 1711 fp = remote.stream_out()
1703 1712 l = fp.readline()
1704 1713 try:
1705 1714 resp = int(l)
1706 1715 except ValueError:
1707 1716 raise error.ResponseError(
1708 1717 _('unexpected response from remote server:'), l)
1709 1718 if resp == 1:
1710 1719 raise util.Abort(_('operation forbidden by server'))
1711 1720 elif resp == 2:
1712 1721 raise util.Abort(_('locking the remote repository failed'))
1713 1722 elif resp != 0:
1714 1723 raise util.Abort(_('the server sent an unknown error code'))
1715 1724 self.ui.status(_('streaming all changes\n'))
1716 1725 l = fp.readline()
1717 1726 try:
1718 1727 total_files, total_bytes = map(int, l.split(' ', 1))
1719 1728 except (ValueError, TypeError):
1720 1729 raise error.ResponseError(
1721 1730 _('unexpected response from remote server:'), l)
1722 1731 self.ui.status(_('%d files to transfer, %s of data\n') %
1723 1732 (total_files, util.bytecount(total_bytes)))
1724 1733 handled_bytes = 0
1725 1734 self.ui.progress(_('clone'), 0, total=total_bytes)
1726 1735 start = time.time()
1727 1736
1728 1737 tr = self.transaction(_('clone'))
1729 1738 try:
1730 1739 for i in xrange(total_files):
1731 1740 # XXX doesn't support '\n' or '\r' in filenames
1732 1741 l = fp.readline()
1733 1742 try:
1734 1743 name, size = l.split('\0', 1)
1735 1744 size = int(size)
1736 1745 except (ValueError, TypeError):
1737 1746 raise error.ResponseError(
1738 1747 _('unexpected response from remote server:'), l)
1739 1748 if self.ui.debugflag:
1740 1749 self.ui.debug('adding %s (%s)\n' %
1741 1750 (name, util.bytecount(size)))
1742 1751 # for backwards compat, name was partially encoded
1743 1752 ofp = self.sopener(store.decodedir(name), 'w')
1744 1753 for chunk in util.filechunkiter(fp, limit=size):
1745 1754 handled_bytes += len(chunk)
1746 1755 self.ui.progress(_('clone'), handled_bytes,
1747 1756 total=total_bytes)
1748 1757 ofp.write(chunk)
1749 1758 ofp.close()
1750 1759 tr.close()
1751 1760 finally:
1752 1761 tr.release()
1753 1762
1754 1763 # Writing straight to files circumvented the in-memory caches
1755 1764 self.invalidate()
1756 1765
1757 1766 elapsed = time.time() - start
1758 1767 if elapsed <= 0:
1759 1768 elapsed = 0.001
1760 1769 self.ui.progress(_('clone'), None)
1761 1770 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1762 1771 (util.bytecount(total_bytes), elapsed,
1763 1772 util.bytecount(total_bytes / elapsed)))
1764 1773
1765 1774 # new requirements = old non-format requirements +
1766 1775 # new format-related
1767 1776 # requirements from the streamed-in repository
1768 1777 requirements.update(set(self.requirements) - self.supportedformats)
1769 1778 self._applyrequirements(requirements)
1770 1779 self._writerequirements()
1771 1780
1772 1781 if rbranchmap:
1773 1782 rbheads = []
1774 1783 for bheads in rbranchmap.itervalues():
1775 1784 rbheads.extend(bheads)
1776 1785
1777 1786 if rbheads:
1778 1787 rtiprev = max((int(self.changelog.rev(node))
1779 1788 for node in rbheads))
1780 1789 cache = branchmap.branchcache(rbranchmap,
1781 1790 self[rtiprev].node(),
1782 1791 rtiprev)
1783 1792 # Try to stick it as low as possible
1784 1793 # filters above served are unlikely to be fetched from a clone
1785 1794 for candidate in ('base', 'immutable', 'served'):
1786 1795 rview = self.filtered(candidate)
1787 1796 if cache.validfor(rview):
1788 1797 self._branchcaches[candidate] = cache
1789 1798 cache.write(rview)
1790 1799 break
1791 1800 self.invalidate()
1792 1801 return len(self.heads()) + 1
1793 1802 finally:
1794 1803 lock.release()
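# Editorial note (layout reconstructed from the parsing code above, not an
# authoritative protocol description): the 'stream_out' reply consumed by
# stream_in() looks like
#
#   <status>\n                    '0' ok, '1' forbidden, '2' remote lock failed
#   <total_files> <total_bytes>\n
#   <name>\0<size>\n              repeated per file, each header followed by
#                                 exactly <size> bytes of store data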
1795 1804
1796 1805 def clone(self, remote, heads=[], stream=False):
1797 1806 '''clone remote repository.
1798 1807
1799 1808 keyword arguments:
1800 1809 heads: list of revs to clone (forces use of pull)
1801 1810 stream: use streaming clone if possible'''
1802 1811
1803 1812 # now, all clients that can request uncompressed clones can
1804 1813 # read repo formats supported by all servers that can serve
1805 1814 # them.
1806 1815
1807 1816 # if revlog format changes, client will have to check version
1808 1817 # and format flags on "stream" capability, and use
1809 1818 # uncompressed only if compatible.
1810 1819
1811 1820 if not stream:
1812 1821 # if the server explicitly prefers to stream (for fast LANs)
1813 1822 stream = remote.capable('stream-preferred')
1814 1823
1815 1824 if stream and not heads:
1816 1825 # 'stream' means remote revlog format is revlogv1 only
1817 1826 if remote.capable('stream'):
1818 1827 return self.stream_in(remote, set(('revlogv1',)))
1819 1828 # otherwise, 'streamreqs' contains the remote revlog format
1820 1829 streamreqs = remote.capable('streamreqs')
1821 1830 if streamreqs:
1822 1831 streamreqs = set(streamreqs.split(','))
1823 1832 # if we support it, stream in and adjust our requirements
1824 1833 if not streamreqs - self.supportedformats:
1825 1834 return self.stream_in(remote, streamreqs)
1826 1835 return self.pull(remote, heads)
1827 1836
1828 1837 def pushkey(self, namespace, key, old, new):
1829 1838 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1830 1839 old=old, new=new)
1831 1840 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1832 1841 ret = pushkey.push(self, namespace, key, old, new)
1833 1842 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1834 1843 ret=ret)
1835 1844 return ret
1836 1845
1837 1846 def listkeys(self, namespace):
1838 1847 self.hook('prelistkeys', throw=True, namespace=namespace)
1839 1848 self.ui.debug('listing keys for "%s"\n' % namespace)
1840 1849 values = pushkey.list(self, namespace)
1841 1850 self.hook('listkeys', namespace=namespace, values=values)
1842 1851 return values
1843 1852
1844 1853 def debugwireargs(self, one, two, three=None, four=None, five=None):
1845 1854 '''used to test argument passing over the wire'''
1846 1855 return "%s %s %s %s %s" % (one, two, three, four, five)
1847 1856
1848 1857 def savecommitmessage(self, text):
1849 1858 fp = self.opener('last-message.txt', 'wb')
1850 1859 try:
1851 1860 fp.write(text)
1852 1861 finally:
1853 1862 fp.close()
1854 1863 return self.pathto(fp.name[len(self.root) + 1:])
1855 1864
1856 1865 # used to avoid circular references so destructors work
1857 1866 def aftertrans(files):
1858 1867 renamefiles = [tuple(t) for t in files]
1859 1868 def a():
1860 1869 for vfs, src, dest in renamefiles:
1861 1870 try:
1862 1871 vfs.rename(src, dest)
1863 1872 except OSError: # journal file does not yet exist
1864 1873 pass
1865 1874 return a
1866 1875
1867 1876 def undoname(fn):
1868 1877 base, name = os.path.split(fn)
1869 1878 assert name.startswith('journal')
1870 1879 return os.path.join(base, name.replace('journal', 'undo', 1))
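# Editorial illustration (not part of the original change): undoname() simply
# swaps the 'journal' prefix for 'undo', so the files written by
# _writejournal() map onto the files consumed by rollback():
#
#   undoname('journal')            -> 'undo'
#   undoname('journal.dirstate')   -> 'undo.dirstate'
#   undoname('journal.phaseroots') -> 'undo.phaseroots'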
1871 1880
1872 1881 def instance(ui, path, create):
1873 1882 return localrepository(ui, util.urllocalpath(path), create)
1874 1883
1875 1884 def islocal(path):
1876 1885 return True
@@ -1,778 +1,785 b''
1 1 # wireproto.py - generic wire protocol support functions
2 2 #
3 3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import urllib, tempfile, os, sys
9 9 from i18n import _
10 10 from node import bin, hex
11 11 import changegroup as changegroupmod
12 12 import peer, error, encoding, util, store, exchange
13 13
14 14
15 15 class abstractserverproto(object):
16 16 """abstract class that summarizes the protocol API
17 17
18 18 Used as reference and documentation.
19 19 """
20 20
21 21 def getargs(self, args):
22 22 """return the value for arguments in <args>
23 23
24 24 returns a list of values (same order as <args>)"""
25 25 raise NotImplementedError()
26 26
27 27 def getfile(self, fp):
28 28 """write the whole content of a file into a file like object
29 29
30 30 The file is in the form::
31 31
32 32 (<chunk-size>\n<chunk>)+0\n
33 33
34 34 The chunk size is the ASCII representation of the integer.
35 35 """
36 36 raise NotImplementedError()
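# Editorial note (assumption, illustrating the getfile() docstring above): a
# 12-byte payload 'helloworld!!' sent in 5-byte chunks would travel as
#
#   '5\nhello' '5\nworld' '2\n!!' '0\n'
#
# i.e. every chunk is prefixed by its decimal ASCII length on its own line,
# and a lone '0\n' terminates the stream.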
37 37
38 38 def redirect(self):
39 39 """may setup interception for stdout and stderr
40 40
41 41 See also the `restore` method."""
42 42 raise NotImplementedError()
43 43
44 44 # If the `redirect` function does install interception, the `restore`
45 45 # function MUST be defined. If interception is not used, this function
46 46 # MUST NOT be defined.
47 47 #
48 48 # left commented here on purpose
49 49 #
50 50 #def restore(self):
51 51 # """reinstall previous stdout and stderr and return intercepted stdout
52 52 # """
53 53 # raise NotImplementedError()
54 54
55 55 def groupchunks(self, cg):
56 56 """return 4096-byte chunks from a changegroup object
57 57
58 58 Some protocols may have compressed the contents."""
59 59 raise NotImplementedError()
60 60
61 61 # abstract batching support
62 62
63 63 class future(object):
64 64 '''placeholder for a value to be set later'''
65 65 def set(self, value):
66 66 if util.safehasattr(self, 'value'):
67 67 raise error.RepoError("future is already set")
68 68 self.value = value
69 69
70 70 class batcher(object):
71 71 '''base class for batches of commands submittable in a single request
72 72
73 73 All methods invoked on instances of this class are simply queued and
74 74 return a future for the result. Once you call submit(), all the queued
75 75 calls are performed and the results set in their respective futures.
76 76 '''
77 77 def __init__(self):
78 78 self.calls = []
79 79 def __getattr__(self, name):
80 80 def call(*args, **opts):
81 81 resref = future()
82 82 self.calls.append((name, args, opts, resref,))
83 83 return resref
84 84 return call
85 85 def submit(self):
86 86 pass
87 87
88 88 class localbatch(batcher):
89 89 '''performs the queued calls directly'''
90 90 def __init__(self, local):
91 91 batcher.__init__(self)
92 92 self.local = local
93 93 def submit(self):
94 94 for name, args, opts, resref in self.calls:
95 95 resref.set(getattr(self.local, name)(*args, **opts))
96 96
97 97 class remotebatch(batcher):
98 98 '''batches the queued calls; uses as few roundtrips as possible'''
99 99 def __init__(self, remote):
100 100 '''remote must support _submitbatch(encbatch) and
101 101 _submitone(op, encargs)'''
102 102 batcher.__init__(self)
103 103 self.remote = remote
104 104 def submit(self):
105 105 req, rsp = [], []
106 106 for name, args, opts, resref in self.calls:
107 107 mtd = getattr(self.remote, name)
108 108 batchablefn = getattr(mtd, 'batchable', None)
109 109 if batchablefn is not None:
110 110 batchable = batchablefn(mtd.im_self, *args, **opts)
111 111 encargsorres, encresref = batchable.next()
112 112 if encresref:
113 113 req.append((name, encargsorres,))
114 114 rsp.append((batchable, encresref, resref,))
115 115 else:
116 116 resref.set(encargsorres)
117 117 else:
118 118 if req:
119 119 self._submitreq(req, rsp)
120 120 req, rsp = [], []
121 121 resref.set(mtd(*args, **opts))
122 122 if req:
123 123 self._submitreq(req, rsp)
124 124 def _submitreq(self, req, rsp):
125 125 encresults = self.remote._submitbatch(req)
126 126 for encres, r in zip(encresults, rsp):
127 127 batchable, encresref, resref = r
128 128 encresref.set(encres)
129 129 resref.set(batchable.next())
130 130
131 131 def batchable(f):
132 132 '''annotation for batchable methods
133 133
134 134 Such methods must implement a coroutine as follows:
135 135
136 136 @batchable
137 137 def sample(self, one, two=None):
138 138 # Handle locally computable results first:
139 139 if not one:
140 140 yield "a local result", None
141 141 # Build list of encoded arguments suitable for your wire protocol:
142 142 encargs = [('one', encode(one),), ('two', encode(two),)]
143 143 # Create future for injection of encoded result:
144 144 encresref = future()
145 145 # Return encoded arguments and future:
146 146 yield encargs, encresref
147 147 # Assuming the future to be filled with the result from the batched
148 148 # request now. Decode it:
149 149 yield decode(encresref.value)
150 150
151 151 The decorator returns a function which wraps this coroutine as a plain
152 152 method, but adds the original method as an attribute called "batchable",
153 153 which is used by remotebatch to split the call into separate encoding and
154 154 decoding phases.
155 155 '''
156 156 def plain(*args, **opts):
157 157 batchable = f(*args, **opts)
158 158 encargsorres, encresref = batchable.next()
159 159 if not encresref:
160 160 return encargsorres # a local result in this case
161 161 self = args[0]
162 162 encresref.set(self._submitone(f.func_name, encargsorres))
163 163 return batchable.next()
164 164 setattr(plain, 'batchable', f)
165 165 return plain
166 166
167 167 # list of nodes encoding / decoding
168 168
169 169 def decodelist(l, sep=' '):
170 170 if l:
171 171 return map(bin, l.split(sep))
172 172 return []
173 173
174 174 def encodelist(l, sep=' '):
175 175 return sep.join(map(hex, l))
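# Editorial sketch (not part of the original change): encodelist() and
# decodelist() round-trip 20-byte binary nodes through space-separated
# 40-digit hex strings. A quick self-check, assuming only the two helpers
# above:
def _check_nodelist_roundtrip_sketch():
    nodes = ['\x00' * 20, '\xff' * 20]
    wire = encodelist(nodes)       # '0000...0000 ffff...ffff'
    assert decodelist(wire) == nodes
    assert decodelist('') == []
    return wire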
176 176
177 177 # batched call argument encoding
178 178
179 179 def escapearg(plain):
180 180 return (plain
181 181 .replace(':', '::')
182 182 .replace(',', ':,')
183 183 .replace(';', ':;')
184 184 .replace('=', ':='))
185 185
186 186 def unescapearg(escaped):
187 187 return (escaped
188 188 .replace(':=', '=')
189 189 .replace(':;', ';')
190 190 .replace(':,', ',')
191 191 .replace('::', ':'))
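# Editorial sketch (not part of the original change): escapearg() and
# unescapearg() form an exact round trip, so ',', ';' and '=' can safely act
# as delimiters in batched wire commands. A quick self-check, assuming only
# the two helpers above:
def _check_escapearg_roundtrip_sketch():
    sample = 'a=b;c,d:e'
    escaped = escapearg(sample)    # -> 'a:=b:;c:,d::e'
    assert unescapearg(escaped) == sample
    return escaped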
192 192
193 193 # client side
194 194
195 195 class wirepeer(peer.peerrepository):
196 196
197 197 def batch(self):
198 198 return remotebatch(self)
199 199 def _submitbatch(self, req):
200 200 cmds = []
201 201 for op, argsdict in req:
202 202 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
203 203 cmds.append('%s %s' % (op, args))
204 204 rsp = self._call("batch", cmds=';'.join(cmds))
205 205 return rsp.split(';')
206 206 def _submitone(self, op, args):
207 207 return self._call(op, **args)
208 208
209 209 @batchable
210 210 def lookup(self, key):
211 211 self.requirecap('lookup', _('look up remote revision'))
212 212 f = future()
213 213 yield {'key': encoding.fromlocal(key)}, f
214 214 d = f.value
215 215 success, data = d[:-1].split(" ", 1)
216 216 if int(success):
217 217 yield bin(data)
218 218 self._abort(error.RepoError(data))
219 219
220 220 @batchable
221 221 def heads(self):
222 222 f = future()
223 223 yield {}, f
224 224 d = f.value
225 225 try:
226 226 yield decodelist(d[:-1])
227 227 except ValueError:
228 228 self._abort(error.ResponseError(_("unexpected response:"), d))
229 229
230 230 @batchable
231 231 def known(self, nodes):
232 232 f = future()
233 233 yield {'nodes': encodelist(nodes)}, f
234 234 d = f.value
235 235 try:
236 236 yield [bool(int(f)) for f in d]
237 237 except ValueError:
238 238 self._abort(error.ResponseError(_("unexpected response:"), d))
239 239
240 240 @batchable
241 241 def branchmap(self):
242 242 f = future()
243 243 yield {}, f
244 244 d = f.value
245 245 try:
246 246 branchmap = {}
247 247 for branchpart in d.splitlines():
248 248 branchname, branchheads = branchpart.split(' ', 1)
249 249 branchname = encoding.tolocal(urllib.unquote(branchname))
250 250 branchheads = decodelist(branchheads)
251 251 branchmap[branchname] = branchheads
252 252 yield branchmap
253 253 except TypeError:
254 254 self._abort(error.ResponseError(_("unexpected response:"), d))
255 255
256 256 def branches(self, nodes):
257 257 n = encodelist(nodes)
258 258 d = self._call("branches", nodes=n)
259 259 try:
260 260 br = [tuple(decodelist(b)) for b in d.splitlines()]
261 261 return br
262 262 except ValueError:
263 263 self._abort(error.ResponseError(_("unexpected response:"), d))
264 264
265 265 def between(self, pairs):
266 266 batch = 8 # avoid giant requests
267 267 r = []
268 268 for i in xrange(0, len(pairs), batch):
269 269 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
270 270 d = self._call("between", pairs=n)
271 271 try:
272 272 r.extend(l and decodelist(l) or [] for l in d.splitlines())
273 273 except ValueError:
274 274 self._abort(error.ResponseError(_("unexpected response:"), d))
275 275 return r
276 276
277 277 @batchable
278 278 def pushkey(self, namespace, key, old, new):
279 279 if not self.capable('pushkey'):
280 280 yield False, None
281 281 f = future()
282 282 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
283 283 yield {'namespace': encoding.fromlocal(namespace),
284 284 'key': encoding.fromlocal(key),
285 285 'old': encoding.fromlocal(old),
286 286 'new': encoding.fromlocal(new)}, f
287 287 d = f.value
288 288 d, output = d.split('\n', 1)
289 289 try:
290 290 d = bool(int(d))
291 291 except ValueError:
292 292 raise error.ResponseError(
293 293 _('push failed (unexpected response):'), d)
294 294 for l in output.splitlines(True):
295 295 self.ui.status(_('remote: '), l)
296 296 yield d
297 297
298 298 @batchable
299 299 def listkeys(self, namespace):
300 300 if not self.capable('pushkey'):
301 301 yield {}, None
302 302 f = future()
303 303 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
304 304 yield {'namespace': encoding.fromlocal(namespace)}, f
305 305 d = f.value
306 306 r = {}
307 307 for l in d.splitlines():
308 308 k, v = l.split('\t')
309 309 r[encoding.tolocal(k)] = encoding.tolocal(v)
310 310 yield r
311 311
312 312 def stream_out(self):
313 313 return self._callstream('stream_out')
314 314
315 315 def changegroup(self, nodes, kind):
316 316 n = encodelist(nodes)
317 317 f = self._callcompressable("changegroup", roots=n)
318 318 return changegroupmod.unbundle10(f, 'UN')
319 319
320 320 def changegroupsubset(self, bases, heads, kind):
321 321 self.requirecap('changegroupsubset', _('look up remote changes'))
322 322 bases = encodelist(bases)
323 323 heads = encodelist(heads)
324 324 f = self._callcompressable("changegroupsubset",
325 325 bases=bases, heads=heads)
326 326 return changegroupmod.unbundle10(f, 'UN')
327 327
328 328 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
329 329 self.requirecap('getbundle', _('look up remote changes'))
330 330 opts = {}
331 331 if heads is not None:
332 332 opts['heads'] = encodelist(heads)
333 333 if common is not None:
334 334 opts['common'] = encodelist(common)
335 335 if bundlecaps is not None:
336 336 opts['bundlecaps'] = ','.join(bundlecaps)
337 337 f = self._callcompressable("getbundle", **opts)
338 338 return changegroupmod.unbundle10(f, 'UN')
339 339
340 340 def unbundle(self, cg, heads, source):
341 341 '''Send cg (a readable file-like object representing the
342 342 changegroup to push, typically a chunkbuffer object) to the
343 343 remote server as a bundle. Return an integer indicating the
344 344 result of the push (see localrepository.addchangegroup()).'''
345 345
346 346 if heads != ['force'] and self.capable('unbundlehash'):
347 347 heads = encodelist(['hashed',
348 348 util.sha1(''.join(sorted(heads))).digest()])
349 349 else:
350 350 heads = encodelist(heads)
351 351
352 352 ret, output = self._callpush("unbundle", cg, heads=heads)
353 353 if ret == "":
354 354 raise error.ResponseError(
355 355 _('push failed:'), output)
356 356 try:
357 357 ret = int(ret)
358 358 except ValueError:
359 359 raise error.ResponseError(
360 360 _('push failed (unexpected response):'), ret)
361 361
362 362 for l in output.splitlines(True):
363 363 self.ui.status(_('remote: '), l)
364 364 return ret
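# Editorial note (not part of the original change): with the 'unbundlehash'
# capability the client avoids resending every remote head seen during
# discovery; it sends ['hashed', sha1(concatenation of the sorted binary
# heads)] instead, so the server can cheaply check whether its heads changed
# between discovery and the actual unbundle.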
365 365
366 366 def debugwireargs(self, one, two, three=None, four=None, five=None):
367 367 # don't pass optional arguments left at their default value
368 368 opts = {}
369 369 if three is not None:
370 370 opts['three'] = three
371 371 if four is not None:
372 372 opts['four'] = four
373 373 return self._call('debugwireargs', one=one, two=two, **opts)
374 374
375 375 def _call(self, cmd, **args):
376 376 """execute <cmd> on the server
377 377
378 378 The command is expected to return a simple string.
379 379
380 380 returns the server reply as a string."""
381 381 raise NotImplementedError()
382 382
383 383 def _callstream(self, cmd, **args):
384 384 """execute <cmd> on the server
385 385
386 386 The command is expected to return a stream.
387 387
388 388 returns the server reply as a file like object."""
389 389 raise NotImplementedError()
390 390
391 391 def _callcompressable(self, cmd, **args):
392 392 """execute <cmd> on the server
393 393
394 394 The command is expected to return a stream.
395 395
396 396 The stream may have been compressed in some implementations. This
397 397 function takes care of the decompression. This is the only difference
398 398 from _callstream.
399 399
400 400 returns the server reply as a file like object.
401 401 """
402 402 raise NotImplementedError()
403 403
404 404 def _callpush(self, cmd, fp, **args):
405 405 """execute a <cmd> on server
406 406
407 407 The command is expected to be related to a push. Push has a special
408 408 return method.
409 409
410 410 returns the server reply as a (ret, output) tuple. ret is either
411 411 empty (error) or a stringified int.
412 412 """
413 413 raise NotImplementedError()
414 414
415 415 def _abort(self, exception):
416 416 """clearly abort the wire protocol connection and raise the exception
417 417 """
418 418 raise NotImplementedError()
419 419
420 420 # server side
421 421
422 422 # a wire protocol command can either return a string or one of these classes.
423 423 class streamres(object):
424 424 """wireproto reply: binary stream
425 425
426 426 The call was successful and the result is a stream.
427 427 Iterate on the `self.gen` attribute to retrieve chunks.
428 428 """
429 429 def __init__(self, gen):
430 430 self.gen = gen
431 431
432 432 class pushres(object):
433 433 """wireproto reply: success with simple integer return
434 434
435 435 The call was successful and returned an integer contained in `self.res`.
436 436 """
437 437 def __init__(self, res):
438 438 self.res = res
439 439
440 440 class pusherr(object):
441 441 """wireproto reply: failure
442 442
443 443 The call failed. The `self.res` attribute contains the error message.
444 444 """
445 445 def __init__(self, res):
446 446 self.res = res
447 447
448 448 class ooberror(object):
449 449 """wireproto reply: failure of a batch of operations
450 450
451 451 Something failed during a batch call. The error message is stored in
452 452 `self.message`.
453 453 """
454 454 def __init__(self, message):
455 455 self.message = message
456 456
457 457 def dispatch(repo, proto, command):
458 458 repo = repo.filtered("served")
459 459 func, spec = commands[command]
460 460 args = proto.getargs(spec)
461 461 return func(repo, proto, *args)
462 462
463 463 def options(cmd, keys, others):
464 464 opts = {}
465 465 for k in keys:
466 466 if k in others:
467 467 opts[k] = others[k]
468 468 del others[k]
469 469 if others:
470 470 sys.stderr.write("abort: %s got unexpected arguments %s\n"
471 471 % (cmd, ",".join(others)))
472 472 return opts
473 473
474 474 # list of commands
475 475 commands = {}
476 476
477 477 def wireprotocommand(name, args=''):
478 478 """decorator for wireprotocol command"""
479 479 def register(func):
480 480 commands[name] = (func, args)
481 481 return func
482 482 return register
483 483
484 484 @wireprotocommand('batch', 'cmds *')
485 485 def batch(repo, proto, cmds, others):
486 486 repo = repo.filtered("served")
487 487 res = []
488 488 for pair in cmds.split(';'):
489 489 op, args = pair.split(' ', 1)
490 490 vals = {}
491 491 for a in args.split(','):
492 492 if a:
493 493 n, v = a.split('=')
494 494 vals[n] = unescapearg(v)
495 495 func, spec = commands[op]
496 496 if spec:
497 497 keys = spec.split()
498 498 data = {}
499 499 for k in keys:
500 500 if k == '*':
501 501 star = {}
502 502 for key in vals.keys():
503 503 if key not in keys:
504 504 star[key] = vals[key]
505 505 data['*'] = star
506 506 else:
507 507 data[k] = vals[k]
508 508 result = func(repo, proto, *[data[k] for k in keys])
509 509 else:
510 510 result = func(repo, proto)
511 511 if isinstance(result, ooberror):
512 512 return result
513 513 res.append(escapearg(result))
514 514 return ';'.join(res)
515 515
516 516 @wireprotocommand('between', 'pairs')
517 517 def between(repo, proto, pairs):
518 518 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
519 519 r = []
520 520 for b in repo.between(pairs):
521 521 r.append(encodelist(b) + "\n")
522 522 return "".join(r)
523 523
524 524 @wireprotocommand('branchmap')
525 525 def branchmap(repo, proto):
526 526 branchmap = repo.branchmap()
527 527 heads = []
528 528 for branch, nodes in branchmap.iteritems():
529 529 branchname = urllib.quote(encoding.fromlocal(branch))
530 530 branchnodes = encodelist(nodes)
531 531 heads.append('%s %s' % (branchname, branchnodes))
532 532 return '\n'.join(heads)
533 533
534 534 @wireprotocommand('branches', 'nodes')
535 535 def branches(repo, proto, nodes):
536 536 nodes = decodelist(nodes)
537 537 r = []
538 538 for b in repo.branches(nodes):
539 539 r.append(encodelist(b) + "\n")
540 540 return "".join(r)
541 541
542 542
543 543 wireprotocaps = ['lookup', 'changegroupsubset', 'branchmap', 'pushkey',
544 544 'known', 'getbundle', 'unbundlehash', 'batch']
545 545
546 546 def _capabilities(repo, proto):
547 547 """return a list of capabilities for a repo
548 548
549 549 This function exists to allow extensions to easily wrap capabilities
550 550 computation
551 551
552 552 - returns a list: easy to alter
553 553 - changes made here are propagated to both the `capabilities` and `hello`
554 554 commands without any other action needed.
555 555 """
556 556 # copy to prevent modification of the global list
557 557 caps = list(wireprotocaps)
558 558 if _allowstream(repo.ui):
559 559 if repo.ui.configbool('server', 'preferuncompressed', False):
560 560 caps.append('stream-preferred')
561 561 requiredformats = repo.requirements & repo.supportedformats
562 562 # if our local revlogs are just revlogv1, add 'stream' cap
563 563 if not requiredformats - set(('revlogv1',)):
564 564 caps.append('stream')
565 565 # otherwise, add 'streamreqs' detailing our local revlog format
566 566 else:
567 567 caps.append('streamreqs=%s' % ','.join(requiredformats))
568 568 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
569 569 caps.append('httpheader=1024')
570 570 return caps
571 571
572 572 # If you are writing an extension and considering wrapping this function, wrap
573 573 # `_capabilities` instead.
574 574 @wireprotocommand('capabilities')
575 575 def capabilities(repo, proto):
576 576 return ' '.join(_capabilities(repo, proto))
577 577
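In line with the comment above, a hypothetical extension would advertise an extra capability roughly as follows (the extension hook and capability token are invented; this assumes the standard extensions.wrapfunction API):

    from mercurial import extensions, wireproto

    def _extendedcaps(orig, repo, proto):
        caps = orig(repo, proto)
        caps.append('mycap')    # hypothetical capability token
        return caps

    def extsetup(ui):
        extensions.wrapfunction(wireproto, '_capabilities', _extendedcaps)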
578 578 @wireprotocommand('changegroup', 'roots')
579 579 def changegroup(repo, proto, roots):
580 580 nodes = decodelist(roots)
581 581 cg = changegroupmod.changegroup(repo, nodes, 'serve')
582 582 return streamres(proto.groupchunks(cg))
583 583
584 584 @wireprotocommand('changegroupsubset', 'bases heads')
585 585 def changegroupsubset(repo, proto, bases, heads):
586 586 bases = decodelist(bases)
587 587 heads = decodelist(heads)
588 588 cg = changegroupmod.changegroupsubset(repo, bases, heads, 'serve')
589 589 return streamres(proto.groupchunks(cg))
590 590
591 591 @wireprotocommand('debugwireargs', 'one two *')
592 592 def debugwireargs(repo, proto, one, two, others):
593 593 # only accept optional args from the known set
594 594 opts = options('debugwireargs', ['three', 'four'], others)
595 595 return repo.debugwireargs(one, two, **opts)
596 596
597 597 @wireprotocommand('getbundle', '*')
598 598 def getbundle(repo, proto, others):
599 599 opts = options('getbundle', ['heads', 'common', 'bundlecaps'], others)
600 600 for k, v in opts.iteritems():
601 601 if k in ('heads', 'common'):
602 602 opts[k] = decodelist(v)
603 603 elif k == 'bundlecaps':
604 604 opts[k] = set(v.split(','))
605 605 cg = changegroupmod.getbundle(repo, 'serve', **opts)
606 606 return streamres(proto.groupchunks(cg))
607 607
608 608 @wireprotocommand('heads')
609 609 def heads(repo, proto):
610 610 h = repo.heads()
611 611 return encodelist(h) + "\n"
612 612
613 613 @wireprotocommand('hello')
614 614 def hello(repo, proto):
615 615 '''the hello command returns a set of lines describing various
616 616 interesting things about the server, in an RFC822-like format.
617 617 Currently the only one defined is "capabilities", which
618 618 consists of a line in the form:
619 619
620 620 capabilities: space separated list of tokens
621 621 '''
622 622 return "capabilities: %s\n" % (capabilities(repo, proto))
623 623
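As an illustration of the format described in the docstring, a client might parse the reply along these lines (the response string is a made-up example):

    resp = 'capabilities: lookup branchmap pushkey known getbundle batch\n'
    caps = set()
    for line in resp.splitlines():
        key, sep, value = line.partition(': ')
        if key == 'capabilities':
            caps.update(value.split())
    # caps now contains e.g. 'batch', which the client can test before using it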
624 624 @wireprotocommand('listkeys', 'namespace')
625 625 def listkeys(repo, proto, namespace):
626 626 d = repo.listkeys(encoding.tolocal(namespace)).items()
627 627 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
628 628 for k, v in d])
629 629 return t
630 630
631 631 @wireprotocommand('lookup', 'key')
632 632 def lookup(repo, proto, key):
633 633 try:
634 634 k = encoding.tolocal(key)
635 635 c = repo[k]
636 636 r = c.hex()
637 637 success = 1
638 638 except Exception, inst:
639 639 r = str(inst)
640 640 success = 0
641 641 return "%s %s\n" % (success, r)
642 642
643 643 @wireprotocommand('known', 'nodes *')
644 644 def known(repo, proto, nodes, others):
645 645 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
646 646
647 647 @wireprotocommand('pushkey', 'namespace key old new')
648 648 def pushkey(repo, proto, namespace, key, old, new):
649 649 # compatibility with pre-1.8 clients which were accidentally
650 650 # sending raw binary nodes rather than utf-8-encoded hex
651 651 if len(new) == 20 and new.encode('string-escape') != new:
652 652 # looks like it could be a binary node
653 653 try:
654 654 new.decode('utf-8')
655 655 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
656 656 except UnicodeDecodeError:
657 657 pass # binary, leave unmodified
658 658 else:
659 659 new = encoding.tolocal(new) # normal path
660 660
661 661 if util.safehasattr(proto, 'restore'):
662 662
663 663 proto.redirect()
664 664
665 665 try:
666 666 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
667 667 encoding.tolocal(old), new) or False
668 668 except util.Abort:
669 669 r = False
670 670
671 671 output = proto.restore()
672 672
673 673 return '%s\n%s' % (int(r), output)
674 674
675 675 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
676 676 encoding.tolocal(old), new)
677 677 return '%s\n' % int(r)
678 678
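For reference, a hedged illustration of the local call the handler above forwards to; the bookmark name and node are made up:

    ok = repo.pushkey('bookmarks',     # namespace
                      'feature-x',     # key (bookmark name)
                      '',              # old value: '' means not set yet
                      '0123456789abcdef0123456789abcdef01234567')  # new hex node
    # the wire reply is then '1\n' or '0\n', optionally followed by captured
    # hook output when the protocol supports restore()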
679 679 def _allowstream(ui):
680 680 return ui.configbool('server', 'uncompressed', True, untrusted=True)
681 681
682 682 def _walkstreamfiles(repo):
683 683 # this is its own function so extensions can override it
684 684 return repo.store.walk()
685 685
686 686 @wireprotocommand('stream_out')
687 687 def stream(repo, proto):
688 688 '''If the server supports streaming clone, it advertises the "stream"
689 689 capability with a value representing the version and flags of the repo
690 690 it is serving. The client checks to see if it understands the format.
691 691
692 692 The format is simple: the server writes out a line with the number
693 693 of files, then the total number of bytes to be transferred (separated
694 694 by a space). Then, for each file, the server first writes the filename
695 695 and filesize (separated by the null character), then the file contents.
696 696 '''
697 697
698 698 if not _allowstream(repo.ui):
699 699 return '1\n'
700 700
701 701 entries = []
702 702 total_bytes = 0
703 703 try:
704 704 # get consistent snapshot of repo, lock during scan
705 705 lock = repo.lock()
706 706 try:
707 707 repo.ui.debug('scanning\n')
708 708 for name, ename, size in _walkstreamfiles(repo):
709 709 if size:
710 710 entries.append((name, size))
711 711 total_bytes += size
712 712 finally:
713 713 lock.release()
714 714 except error.LockError:
715 715 return '2\n' # error: 2
716 716
717 717 def streamer(repo, entries, total):
718 718 '''stream out all metadata files in repository.'''
719 719 yield '0\n' # success
720 720 repo.ui.debug('%d files, %d bytes to transfer\n' %
721 721 (len(entries), total_bytes))
722 722 yield '%d %d\n' % (len(entries), total_bytes)
723 723
724 724 sopener = repo.sopener
725 725 oldaudit = sopener.mustaudit
726 726 debugflag = repo.ui.debugflag
727 727 sopener.mustaudit = False
728 728
729 729 try:
730 730 for name, size in entries:
731 731 if debugflag:
732 732 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
733 733 # partially encode name over the wire for backwards compat
734 734 yield '%s\0%d\n' % (store.encodedir(name), size)
735 735 if size <= 65536:
736 736 fp = sopener(name)
737 737 try:
738 738 data = fp.read(size)
739 739 finally:
740 740 fp.close()
741 741 yield data
742 742 else:
743 743 for chunk in util.filechunkiter(sopener(name), limit=size):
744 744 yield chunk
745 745 # replace with "finally:" when support for python 2.4 has been dropped
746 746 except Exception:
747 747 sopener.mustaudit = oldaudit
748 748 raise
749 749 sopener.mustaudit = oldaudit
750 750
751 751 return streamres(streamer(repo, entries, total_bytes))
752 752
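To complement the docstring above, a minimal client-side sketch of consuming that stream; this is an illustrative reading loop over any file-like object, not the actual client code:

    def readstream_sketch(fp):
        status = fp.readline()
        if status != '0\n':     # '1\n' = streaming disabled, '2\n' = lock error
            raise IOError('stream_out refused: %r' % status)
        filecount, bytecount = map(int, fp.readline().split())
        for _ in range(filecount):
            # each entry is '<store-encoded name>\0<size>\n' followed by raw data
            name, size = fp.readline().rstrip('\n').split('\0')
            yield name, fp.read(int(size))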
753 753 @wireprotocommand('unbundle', 'heads')
754 754 def unbundle(repo, proto, heads):
755 755 their_heads = decodelist(heads)
756 756
757 757 try:
758 758 proto.redirect()
759 759
760 760 exchange.check_heads(repo, their_heads, 'preparing changes')
761 761
762 762 # write bundle data to a temporary file because it can be big
763 763 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
764 764 fp = os.fdopen(fd, 'wb+')
765 765 r = 0
766 766 try:
767 767 proto.getfile(fp)
768 768 fp.seek(0)
769 769 gen = changegroupmod.readbundle(fp, None)
770 770 r = exchange.unbundle(repo, gen, their_heads, 'serve',
771 771 proto._client())
772 772 return pushres(r)
773 773
774 774 finally:
775 775 fp.close()
776 776 os.unlink(tempname)
777 except util.Abort, inst:
778 # The old code we moved used sys.stderr directly.
779 # We did not change it, to minimise code churn.
780 # This needs to be moved to something proper.
781 # Feel free to do it.
782 sys.stderr.write("abort: %s\n" % inst)
783 return pushres(0)
777 784 except exchange.PushRaced, exc:
778 785 return pusherr(str(exc))
@@ -1,2139 +1,2139 b''
1 1 > do_push()
2 2 > {
3 3 > user=$1
4 4 > shift
5 5 > echo "Pushing as user $user"
6 6 > echo 'hgrc = """'
7 7 > sed -e 1,2d b/.hg/hgrc | grep -v fakegroups.py
8 8 > echo '"""'
9 9 > if test -f acl.config; then
10 10 > echo 'acl.config = """'
11 11 > cat acl.config
12 12 > echo '"""'
13 13 > fi
14 14 > # On AIX /etc/profile sets LOGNAME read-only. So
15 15 > # LOGNAME=$user hg --cwd a --debug push ../b
16 16 > # fails with "This variable is read only."
17 17 > # Use env to work around this.
18 18 > env LOGNAME=$user hg --cwd a --debug push ../b
19 19 > hg --cwd b rollback
20 20 > hg --cwd b --quiet tip
21 21 > echo
22 22 > }
23 23
24 24 > init_config()
25 25 > {
26 26 > cat > fakegroups.py <<EOF
27 27 > from hgext import acl
28 28 > def fakegetusers(ui, group):
29 29 > try:
30 30 > return acl._getusersorig(ui, group)
31 31 > except:
32 32 > return ["fred", "betty"]
33 33 > acl._getusersorig = acl._getusers
34 34 > acl._getusers = fakegetusers
35 35 > EOF
36 36 > rm -f acl.config
37 37 > cat > $config <<EOF
38 38 > [hooks]
39 39 > pretxnchangegroup.acl = python:hgext.acl.hook
40 40 > [acl]
41 41 > sources = push
42 42 > [extensions]
43 43 > f=`pwd`/fakegroups.py
44 44 > EOF
45 45 > }
46 46
47 47 $ hg init a
48 48 $ cd a
49 49 $ mkdir foo foo/Bar quux
50 50 $ echo 'in foo' > foo/file.txt
51 51 $ echo 'in foo/Bar' > foo/Bar/file.txt
52 52 $ echo 'in quux' > quux/file.py
53 53 $ hg add -q
54 54 $ hg ci -m 'add files' -d '1000000 0'
55 55 $ echo >> foo/file.txt
56 56 $ hg ci -m 'change foo/file' -d '1000001 0'
57 57 $ echo >> foo/Bar/file.txt
58 58 $ hg ci -m 'change foo/Bar/file' -d '1000002 0'
59 59 $ echo >> quux/file.py
60 60 $ hg ci -m 'change quux/file' -d '1000003 0'
61 61 $ hg tip --quiet
62 62 3:911600dab2ae
63 63
64 64 $ cd ..
65 65 $ hg clone -r 0 a b
66 66 adding changesets
67 67 adding manifests
68 68 adding file changes
69 69 added 1 changesets with 3 changes to 3 files
70 70 updating to branch default
71 71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
72 72
73 73 $ config=b/.hg/hgrc
74 74
75 75 Extension disabled for lack of a hook
76 76
77 77 $ do_push fred
78 78 Pushing as user fred
79 79 hgrc = """
80 80 """
81 81 pushing to ../b
82 82 query 1; heads
83 83 searching for changes
84 84 all remote heads known locally
85 85 listing keys for "bookmarks"
86 86 3 changesets found
87 87 list of changesets:
88 88 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
89 89 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
90 90 911600dab2ae7a9baff75958b84fe606851ce955
91 91 adding changesets
92 92 bundling: 1/3 changesets (33.33%)
93 93 bundling: 2/3 changesets (66.67%)
94 94 bundling: 3/3 changesets (100.00%)
95 95 bundling: 1/3 manifests (33.33%)
96 96 bundling: 2/3 manifests (66.67%)
97 97 bundling: 3/3 manifests (100.00%)
98 98 bundling: foo/Bar/file.txt 1/3 files (33.33%)
99 99 bundling: foo/file.txt 2/3 files (66.67%)
100 100 bundling: quux/file.py 3/3 files (100.00%)
101 101 changesets: 1 chunks
102 102 add changeset ef1ea85a6374
103 103 changesets: 2 chunks
104 104 add changeset f9cafe1212c8
105 105 changesets: 3 chunks
106 106 add changeset 911600dab2ae
107 107 adding manifests
108 108 manifests: 1/3 chunks (33.33%)
109 109 manifests: 2/3 chunks (66.67%)
110 110 manifests: 3/3 chunks (100.00%)
111 111 adding file changes
112 112 adding foo/Bar/file.txt revisions
113 113 files: 1/3 chunks (33.33%)
114 114 adding foo/file.txt revisions
115 115 files: 2/3 chunks (66.67%)
116 116 adding quux/file.py revisions
117 117 files: 3/3 chunks (100.00%)
118 118 added 3 changesets with 3 changes to 3 files
119 updating the branch cache
119 120 listing keys for "phases"
120 121 try to push obsolete markers to remote
121 updating the branch cache
122 122 checking for updated bookmarks
123 123 listing keys for "bookmarks"
124 124 repository tip rolled back to revision 0 (undo push)
125 125 0:6675d58eff77
126 126
127 127
128 128 $ echo '[hooks]' >> $config
129 129 $ echo 'pretxnchangegroup.acl = python:hgext.acl.hook' >> $config
130 130
131 131 Extension disabled for lack of acl.sources
132 132
133 133 $ do_push fred
134 134 Pushing as user fred
135 135 hgrc = """
136 136 [hooks]
137 137 pretxnchangegroup.acl = python:hgext.acl.hook
138 138 """
139 139 pushing to ../b
140 140 query 1; heads
141 141 searching for changes
142 142 all remote heads known locally
143 143 invalid branchheads cache (served): tip differs
144 144 listing keys for "bookmarks"
145 145 3 changesets found
146 146 list of changesets:
147 147 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
148 148 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
149 149 911600dab2ae7a9baff75958b84fe606851ce955
150 150 adding changesets
151 151 bundling: 1/3 changesets (33.33%)
152 152 bundling: 2/3 changesets (66.67%)
153 153 bundling: 3/3 changesets (100.00%)
154 154 bundling: 1/3 manifests (33.33%)
155 155 bundling: 2/3 manifests (66.67%)
156 156 bundling: 3/3 manifests (100.00%)
157 157 bundling: foo/Bar/file.txt 1/3 files (33.33%)
158 158 bundling: foo/file.txt 2/3 files (66.67%)
159 159 bundling: quux/file.py 3/3 files (100.00%)
160 160 changesets: 1 chunks
161 161 add changeset ef1ea85a6374
162 162 changesets: 2 chunks
163 163 add changeset f9cafe1212c8
164 164 changesets: 3 chunks
165 165 add changeset 911600dab2ae
166 166 adding manifests
167 167 manifests: 1/3 chunks (33.33%)
168 168 manifests: 2/3 chunks (66.67%)
169 169 manifests: 3/3 chunks (100.00%)
170 170 adding file changes
171 171 adding foo/Bar/file.txt revisions
172 172 files: 1/3 chunks (33.33%)
173 173 adding foo/file.txt revisions
174 174 files: 2/3 chunks (66.67%)
175 175 adding quux/file.py revisions
176 176 files: 3/3 chunks (100.00%)
177 177 added 3 changesets with 3 changes to 3 files
178 178 calling hook pretxnchangegroup.acl: hgext.acl.hook
179 179 acl: changes have source "push" - skipping
180 updating the branch cache
180 181 listing keys for "phases"
181 182 try to push obsolete markers to remote
182 updating the branch cache
183 183 checking for updated bookmarks
184 184 listing keys for "bookmarks"
185 185 repository tip rolled back to revision 0 (undo push)
186 186 0:6675d58eff77
187 187
188 188
189 189 No [acl.allow]/[acl.deny]
190 190
191 191 $ echo '[acl]' >> $config
192 192 $ echo 'sources = push' >> $config
193 193 $ do_push fred
194 194 Pushing as user fred
195 195 hgrc = """
196 196 [hooks]
197 197 pretxnchangegroup.acl = python:hgext.acl.hook
198 198 [acl]
199 199 sources = push
200 200 """
201 201 pushing to ../b
202 202 query 1; heads
203 203 searching for changes
204 204 all remote heads known locally
205 205 invalid branchheads cache (served): tip differs
206 206 listing keys for "bookmarks"
207 207 3 changesets found
208 208 list of changesets:
209 209 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
210 210 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
211 211 911600dab2ae7a9baff75958b84fe606851ce955
212 212 adding changesets
213 213 bundling: 1/3 changesets (33.33%)
214 214 bundling: 2/3 changesets (66.67%)
215 215 bundling: 3/3 changesets (100.00%)
216 216 bundling: 1/3 manifests (33.33%)
217 217 bundling: 2/3 manifests (66.67%)
218 218 bundling: 3/3 manifests (100.00%)
219 219 bundling: foo/Bar/file.txt 1/3 files (33.33%)
220 220 bundling: foo/file.txt 2/3 files (66.67%)
221 221 bundling: quux/file.py 3/3 files (100.00%)
222 222 changesets: 1 chunks
223 223 add changeset ef1ea85a6374
224 224 changesets: 2 chunks
225 225 add changeset f9cafe1212c8
226 226 changesets: 3 chunks
227 227 add changeset 911600dab2ae
228 228 adding manifests
229 229 manifests: 1/3 chunks (33.33%)
230 230 manifests: 2/3 chunks (66.67%)
231 231 manifests: 3/3 chunks (100.00%)
232 232 adding file changes
233 233 adding foo/Bar/file.txt revisions
234 234 files: 1/3 chunks (33.33%)
235 235 adding foo/file.txt revisions
236 236 files: 2/3 chunks (66.67%)
237 237 adding quux/file.py revisions
238 238 files: 3/3 chunks (100.00%)
239 239 added 3 changesets with 3 changes to 3 files
240 240 calling hook pretxnchangegroup.acl: hgext.acl.hook
241 241 acl: checking access for user "fred"
242 242 acl: acl.allow.branches not enabled
243 243 acl: acl.deny.branches not enabled
244 244 acl: acl.allow not enabled
245 245 acl: acl.deny not enabled
246 246 acl: branch access granted: "ef1ea85a6374" on branch "default"
247 247 acl: path access granted: "ef1ea85a6374"
248 248 acl: branch access granted: "f9cafe1212c8" on branch "default"
249 249 acl: path access granted: "f9cafe1212c8"
250 250 acl: branch access granted: "911600dab2ae" on branch "default"
251 251 acl: path access granted: "911600dab2ae"
252 updating the branch cache
252 253 listing keys for "phases"
253 254 try to push obsolete markers to remote
254 updating the branch cache
255 255 checking for updated bookmarks
256 256 listing keys for "bookmarks"
257 257 repository tip rolled back to revision 0 (undo push)
258 258 0:6675d58eff77
259 259
260 260
261 261 Empty [acl.allow]
262 262
263 263 $ echo '[acl.allow]' >> $config
264 264 $ do_push fred
265 265 Pushing as user fred
266 266 hgrc = """
267 267 [hooks]
268 268 pretxnchangegroup.acl = python:hgext.acl.hook
269 269 [acl]
270 270 sources = push
271 271 [acl.allow]
272 272 """
273 273 pushing to ../b
274 274 query 1; heads
275 275 searching for changes
276 276 all remote heads known locally
277 277 invalid branchheads cache (served): tip differs
278 278 listing keys for "bookmarks"
279 279 3 changesets found
280 280 list of changesets:
281 281 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
282 282 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
283 283 911600dab2ae7a9baff75958b84fe606851ce955
284 284 adding changesets
285 285 bundling: 1/3 changesets (33.33%)
286 286 bundling: 2/3 changesets (66.67%)
287 287 bundling: 3/3 changesets (100.00%)
288 288 bundling: 1/3 manifests (33.33%)
289 289 bundling: 2/3 manifests (66.67%)
290 290 bundling: 3/3 manifests (100.00%)
291 291 bundling: foo/Bar/file.txt 1/3 files (33.33%)
292 292 bundling: foo/file.txt 2/3 files (66.67%)
293 293 bundling: quux/file.py 3/3 files (100.00%)
294 294 changesets: 1 chunks
295 295 add changeset ef1ea85a6374
296 296 changesets: 2 chunks
297 297 add changeset f9cafe1212c8
298 298 changesets: 3 chunks
299 299 add changeset 911600dab2ae
300 300 adding manifests
301 301 manifests: 1/3 chunks (33.33%)
302 302 manifests: 2/3 chunks (66.67%)
303 303 manifests: 3/3 chunks (100.00%)
304 304 adding file changes
305 305 adding foo/Bar/file.txt revisions
306 306 files: 1/3 chunks (33.33%)
307 307 adding foo/file.txt revisions
308 308 files: 2/3 chunks (66.67%)
309 309 adding quux/file.py revisions
310 310 files: 3/3 chunks (100.00%)
311 311 added 3 changesets with 3 changes to 3 files
312 312 calling hook pretxnchangegroup.acl: hgext.acl.hook
313 313 acl: checking access for user "fred"
314 314 acl: acl.allow.branches not enabled
315 315 acl: acl.deny.branches not enabled
316 316 acl: acl.allow enabled, 0 entries for user fred
317 317 acl: acl.deny not enabled
318 318 acl: branch access granted: "ef1ea85a6374" on branch "default"
319 319 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
320 320 transaction abort!
321 321 rollback completed
322 322 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
323 323 no rollback information available
324 324 0:6675d58eff77
325 325
326 326
327 327 fred is allowed inside foo/
328 328
329 329 $ echo 'foo/** = fred' >> $config
330 330 $ do_push fred
331 331 Pushing as user fred
332 332 hgrc = """
333 333 [hooks]
334 334 pretxnchangegroup.acl = python:hgext.acl.hook
335 335 [acl]
336 336 sources = push
337 337 [acl.allow]
338 338 foo/** = fred
339 339 """
340 340 pushing to ../b
341 341 query 1; heads
342 342 searching for changes
343 343 all remote heads known locally
344 344 invalid branchheads cache (served): tip differs
345 345 listing keys for "bookmarks"
346 346 3 changesets found
347 347 list of changesets:
348 348 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
349 349 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
350 350 911600dab2ae7a9baff75958b84fe606851ce955
351 351 adding changesets
352 352 bundling: 1/3 changesets (33.33%)
353 353 bundling: 2/3 changesets (66.67%)
354 354 bundling: 3/3 changesets (100.00%)
355 355 bundling: 1/3 manifests (33.33%)
356 356 bundling: 2/3 manifests (66.67%)
357 357 bundling: 3/3 manifests (100.00%)
358 358 bundling: foo/Bar/file.txt 1/3 files (33.33%)
359 359 bundling: foo/file.txt 2/3 files (66.67%)
360 360 bundling: quux/file.py 3/3 files (100.00%)
361 361 changesets: 1 chunks
362 362 add changeset ef1ea85a6374
363 363 changesets: 2 chunks
364 364 add changeset f9cafe1212c8
365 365 changesets: 3 chunks
366 366 add changeset 911600dab2ae
367 367 adding manifests
368 368 manifests: 1/3 chunks (33.33%)
369 369 manifests: 2/3 chunks (66.67%)
370 370 manifests: 3/3 chunks (100.00%)
371 371 adding file changes
372 372 adding foo/Bar/file.txt revisions
373 373 files: 1/3 chunks (33.33%)
374 374 adding foo/file.txt revisions
375 375 files: 2/3 chunks (66.67%)
376 376 adding quux/file.py revisions
377 377 files: 3/3 chunks (100.00%)
378 378 added 3 changesets with 3 changes to 3 files
379 379 calling hook pretxnchangegroup.acl: hgext.acl.hook
380 380 acl: checking access for user "fred"
381 381 acl: acl.allow.branches not enabled
382 382 acl: acl.deny.branches not enabled
383 383 acl: acl.allow enabled, 1 entries for user fred
384 384 acl: acl.deny not enabled
385 385 acl: branch access granted: "ef1ea85a6374" on branch "default"
386 386 acl: path access granted: "ef1ea85a6374"
387 387 acl: branch access granted: "f9cafe1212c8" on branch "default"
388 388 acl: path access granted: "f9cafe1212c8"
389 389 acl: branch access granted: "911600dab2ae" on branch "default"
390 390 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
391 391 transaction abort!
392 392 rollback completed
393 393 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
394 394 no rollback information available
395 395 0:6675d58eff77
396 396
397 397
398 398 Empty [acl.deny]
399 399
400 400 $ echo '[acl.deny]' >> $config
401 401 $ do_push barney
402 402 Pushing as user barney
403 403 hgrc = """
404 404 [hooks]
405 405 pretxnchangegroup.acl = python:hgext.acl.hook
406 406 [acl]
407 407 sources = push
408 408 [acl.allow]
409 409 foo/** = fred
410 410 [acl.deny]
411 411 """
412 412 pushing to ../b
413 413 query 1; heads
414 414 searching for changes
415 415 all remote heads known locally
416 416 invalid branchheads cache (served): tip differs
417 417 listing keys for "bookmarks"
418 418 3 changesets found
419 419 list of changesets:
420 420 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
421 421 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
422 422 911600dab2ae7a9baff75958b84fe606851ce955
423 423 adding changesets
424 424 bundling: 1/3 changesets (33.33%)
425 425 bundling: 2/3 changesets (66.67%)
426 426 bundling: 3/3 changesets (100.00%)
427 427 bundling: 1/3 manifests (33.33%)
428 428 bundling: 2/3 manifests (66.67%)
429 429 bundling: 3/3 manifests (100.00%)
430 430 bundling: foo/Bar/file.txt 1/3 files (33.33%)
431 431 bundling: foo/file.txt 2/3 files (66.67%)
432 432 bundling: quux/file.py 3/3 files (100.00%)
433 433 changesets: 1 chunks
434 434 add changeset ef1ea85a6374
435 435 changesets: 2 chunks
436 436 add changeset f9cafe1212c8
437 437 changesets: 3 chunks
438 438 add changeset 911600dab2ae
439 439 adding manifests
440 440 manifests: 1/3 chunks (33.33%)
441 441 manifests: 2/3 chunks (66.67%)
442 442 manifests: 3/3 chunks (100.00%)
443 443 adding file changes
444 444 adding foo/Bar/file.txt revisions
445 445 files: 1/3 chunks (33.33%)
446 446 adding foo/file.txt revisions
447 447 files: 2/3 chunks (66.67%)
448 448 adding quux/file.py revisions
449 449 files: 3/3 chunks (100.00%)
450 450 added 3 changesets with 3 changes to 3 files
451 451 calling hook pretxnchangegroup.acl: hgext.acl.hook
452 452 acl: checking access for user "barney"
453 453 acl: acl.allow.branches not enabled
454 454 acl: acl.deny.branches not enabled
455 455 acl: acl.allow enabled, 0 entries for user barney
456 456 acl: acl.deny enabled, 0 entries for user barney
457 457 acl: branch access granted: "ef1ea85a6374" on branch "default"
458 458 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
459 459 transaction abort!
460 460 rollback completed
461 461 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
462 462 no rollback information available
463 463 0:6675d58eff77
464 464
465 465
466 466 fred is allowed inside foo/, but not foo/bar/ (case matters)
467 467
468 468 $ echo 'foo/bar/** = fred' >> $config
469 469 $ do_push fred
470 470 Pushing as user fred
471 471 hgrc = """
472 472 [hooks]
473 473 pretxnchangegroup.acl = python:hgext.acl.hook
474 474 [acl]
475 475 sources = push
476 476 [acl.allow]
477 477 foo/** = fred
478 478 [acl.deny]
479 479 foo/bar/** = fred
480 480 """
481 481 pushing to ../b
482 482 query 1; heads
483 483 searching for changes
484 484 all remote heads known locally
485 485 invalid branchheads cache (served): tip differs
486 486 listing keys for "bookmarks"
487 487 3 changesets found
488 488 list of changesets:
489 489 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
490 490 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
491 491 911600dab2ae7a9baff75958b84fe606851ce955
492 492 adding changesets
493 493 bundling: 1/3 changesets (33.33%)
494 494 bundling: 2/3 changesets (66.67%)
495 495 bundling: 3/3 changesets (100.00%)
496 496 bundling: 1/3 manifests (33.33%)
497 497 bundling: 2/3 manifests (66.67%)
498 498 bundling: 3/3 manifests (100.00%)
499 499 bundling: foo/Bar/file.txt 1/3 files (33.33%)
500 500 bundling: foo/file.txt 2/3 files (66.67%)
501 501 bundling: quux/file.py 3/3 files (100.00%)
502 502 changesets: 1 chunks
503 503 add changeset ef1ea85a6374
504 504 changesets: 2 chunks
505 505 add changeset f9cafe1212c8
506 506 changesets: 3 chunks
507 507 add changeset 911600dab2ae
508 508 adding manifests
509 509 manifests: 1/3 chunks (33.33%)
510 510 manifests: 2/3 chunks (66.67%)
511 511 manifests: 3/3 chunks (100.00%)
512 512 adding file changes
513 513 adding foo/Bar/file.txt revisions
514 514 files: 1/3 chunks (33.33%)
515 515 adding foo/file.txt revisions
516 516 files: 2/3 chunks (66.67%)
517 517 adding quux/file.py revisions
518 518 files: 3/3 chunks (100.00%)
519 519 added 3 changesets with 3 changes to 3 files
520 520 calling hook pretxnchangegroup.acl: hgext.acl.hook
521 521 acl: checking access for user "fred"
522 522 acl: acl.allow.branches not enabled
523 523 acl: acl.deny.branches not enabled
524 524 acl: acl.allow enabled, 1 entries for user fred
525 525 acl: acl.deny enabled, 1 entries for user fred
526 526 acl: branch access granted: "ef1ea85a6374" on branch "default"
527 527 acl: path access granted: "ef1ea85a6374"
528 528 acl: branch access granted: "f9cafe1212c8" on branch "default"
529 529 acl: path access granted: "f9cafe1212c8"
530 530 acl: branch access granted: "911600dab2ae" on branch "default"
531 531 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
532 532 transaction abort!
533 533 rollback completed
534 534 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
535 535 no rollback information available
536 536 0:6675d58eff77
537 537
538 538
539 539 fred is allowed inside foo/, but not foo/Bar/
540 540
541 541 $ echo 'foo/Bar/** = fred' >> $config
542 542 $ do_push fred
543 543 Pushing as user fred
544 544 hgrc = """
545 545 [hooks]
546 546 pretxnchangegroup.acl = python:hgext.acl.hook
547 547 [acl]
548 548 sources = push
549 549 [acl.allow]
550 550 foo/** = fred
551 551 [acl.deny]
552 552 foo/bar/** = fred
553 553 foo/Bar/** = fred
554 554 """
555 555 pushing to ../b
556 556 query 1; heads
557 557 searching for changes
558 558 all remote heads known locally
559 559 invalid branchheads cache (served): tip differs
560 560 listing keys for "bookmarks"
561 561 3 changesets found
562 562 list of changesets:
563 563 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
564 564 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
565 565 911600dab2ae7a9baff75958b84fe606851ce955
566 566 adding changesets
567 567 bundling: 1/3 changesets (33.33%)
568 568 bundling: 2/3 changesets (66.67%)
569 569 bundling: 3/3 changesets (100.00%)
570 570 bundling: 1/3 manifests (33.33%)
571 571 bundling: 2/3 manifests (66.67%)
572 572 bundling: 3/3 manifests (100.00%)
573 573 bundling: foo/Bar/file.txt 1/3 files (33.33%)
574 574 bundling: foo/file.txt 2/3 files (66.67%)
575 575 bundling: quux/file.py 3/3 files (100.00%)
576 576 changesets: 1 chunks
577 577 add changeset ef1ea85a6374
578 578 changesets: 2 chunks
579 579 add changeset f9cafe1212c8
580 580 changesets: 3 chunks
581 581 add changeset 911600dab2ae
582 582 adding manifests
583 583 manifests: 1/3 chunks (33.33%)
584 584 manifests: 2/3 chunks (66.67%)
585 585 manifests: 3/3 chunks (100.00%)
586 586 adding file changes
587 587 adding foo/Bar/file.txt revisions
588 588 files: 1/3 chunks (33.33%)
589 589 adding foo/file.txt revisions
590 590 files: 2/3 chunks (66.67%)
591 591 adding quux/file.py revisions
592 592 files: 3/3 chunks (100.00%)
593 593 added 3 changesets with 3 changes to 3 files
594 594 calling hook pretxnchangegroup.acl: hgext.acl.hook
595 595 acl: checking access for user "fred"
596 596 acl: acl.allow.branches not enabled
597 597 acl: acl.deny.branches not enabled
598 598 acl: acl.allow enabled, 1 entries for user fred
599 599 acl: acl.deny enabled, 2 entries for user fred
600 600 acl: branch access granted: "ef1ea85a6374" on branch "default"
601 601 acl: path access granted: "ef1ea85a6374"
602 602 acl: branch access granted: "f9cafe1212c8" on branch "default"
603 603 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
604 604 transaction abort!
605 605 rollback completed
606 606 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
607 607 no rollback information available
608 608 0:6675d58eff77
609 609
610 610
611 611 $ echo 'barney is not mentioned => not allowed anywhere'
612 612 barney is not mentioned => not allowed anywhere
613 613 $ do_push barney
614 614 Pushing as user barney
615 615 hgrc = """
616 616 [hooks]
617 617 pretxnchangegroup.acl = python:hgext.acl.hook
618 618 [acl]
619 619 sources = push
620 620 [acl.allow]
621 621 foo/** = fred
622 622 [acl.deny]
623 623 foo/bar/** = fred
624 624 foo/Bar/** = fred
625 625 """
626 626 pushing to ../b
627 627 query 1; heads
628 628 searching for changes
629 629 all remote heads known locally
630 630 invalid branchheads cache (served): tip differs
631 631 listing keys for "bookmarks"
632 632 3 changesets found
633 633 list of changesets:
634 634 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
635 635 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
636 636 911600dab2ae7a9baff75958b84fe606851ce955
637 637 adding changesets
638 638 bundling: 1/3 changesets (33.33%)
639 639 bundling: 2/3 changesets (66.67%)
640 640 bundling: 3/3 changesets (100.00%)
641 641 bundling: 1/3 manifests (33.33%)
642 642 bundling: 2/3 manifests (66.67%)
643 643 bundling: 3/3 manifests (100.00%)
644 644 bundling: foo/Bar/file.txt 1/3 files (33.33%)
645 645 bundling: foo/file.txt 2/3 files (66.67%)
646 646 bundling: quux/file.py 3/3 files (100.00%)
647 647 changesets: 1 chunks
648 648 add changeset ef1ea85a6374
649 649 changesets: 2 chunks
650 650 add changeset f9cafe1212c8
651 651 changesets: 3 chunks
652 652 add changeset 911600dab2ae
653 653 adding manifests
654 654 manifests: 1/3 chunks (33.33%)
655 655 manifests: 2/3 chunks (66.67%)
656 656 manifests: 3/3 chunks (100.00%)
657 657 adding file changes
658 658 adding foo/Bar/file.txt revisions
659 659 files: 1/3 chunks (33.33%)
660 660 adding foo/file.txt revisions
661 661 files: 2/3 chunks (66.67%)
662 662 adding quux/file.py revisions
663 663 files: 3/3 chunks (100.00%)
664 664 added 3 changesets with 3 changes to 3 files
665 665 calling hook pretxnchangegroup.acl: hgext.acl.hook
666 666 acl: checking access for user "barney"
667 667 acl: acl.allow.branches not enabled
668 668 acl: acl.deny.branches not enabled
669 669 acl: acl.allow enabled, 0 entries for user barney
670 670 acl: acl.deny enabled, 0 entries for user barney
671 671 acl: branch access granted: "ef1ea85a6374" on branch "default"
672 672 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
673 673 transaction abort!
674 674 rollback completed
675 675 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
676 676 no rollback information available
677 677 0:6675d58eff77
678 678
679 679
680 680 barney is allowed everywhere
681 681
682 682 $ echo '[acl.allow]' >> $config
683 683 $ echo '** = barney' >> $config
684 684 $ do_push barney
685 685 Pushing as user barney
686 686 hgrc = """
687 687 [hooks]
688 688 pretxnchangegroup.acl = python:hgext.acl.hook
689 689 [acl]
690 690 sources = push
691 691 [acl.allow]
692 692 foo/** = fred
693 693 [acl.deny]
694 694 foo/bar/** = fred
695 695 foo/Bar/** = fred
696 696 [acl.allow]
697 697 ** = barney
698 698 """
699 699 pushing to ../b
700 700 query 1; heads
701 701 searching for changes
702 702 all remote heads known locally
703 703 invalid branchheads cache (served): tip differs
704 704 listing keys for "bookmarks"
705 705 3 changesets found
706 706 list of changesets:
707 707 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
708 708 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
709 709 911600dab2ae7a9baff75958b84fe606851ce955
710 710 adding changesets
711 711 bundling: 1/3 changesets (33.33%)
712 712 bundling: 2/3 changesets (66.67%)
713 713 bundling: 3/3 changesets (100.00%)
714 714 bundling: 1/3 manifests (33.33%)
715 715 bundling: 2/3 manifests (66.67%)
716 716 bundling: 3/3 manifests (100.00%)
717 717 bundling: foo/Bar/file.txt 1/3 files (33.33%)
718 718 bundling: foo/file.txt 2/3 files (66.67%)
719 719 bundling: quux/file.py 3/3 files (100.00%)
720 720 changesets: 1 chunks
721 721 add changeset ef1ea85a6374
722 722 changesets: 2 chunks
723 723 add changeset f9cafe1212c8
724 724 changesets: 3 chunks
725 725 add changeset 911600dab2ae
726 726 adding manifests
727 727 manifests: 1/3 chunks (33.33%)
728 728 manifests: 2/3 chunks (66.67%)
729 729 manifests: 3/3 chunks (100.00%)
730 730 adding file changes
731 731 adding foo/Bar/file.txt revisions
732 732 files: 1/3 chunks (33.33%)
733 733 adding foo/file.txt revisions
734 734 files: 2/3 chunks (66.67%)
735 735 adding quux/file.py revisions
736 736 files: 3/3 chunks (100.00%)
737 737 added 3 changesets with 3 changes to 3 files
738 738 calling hook pretxnchangegroup.acl: hgext.acl.hook
739 739 acl: checking access for user "barney"
740 740 acl: acl.allow.branches not enabled
741 741 acl: acl.deny.branches not enabled
742 742 acl: acl.allow enabled, 1 entries for user barney
743 743 acl: acl.deny enabled, 0 entries for user barney
744 744 acl: branch access granted: "ef1ea85a6374" on branch "default"
745 745 acl: path access granted: "ef1ea85a6374"
746 746 acl: branch access granted: "f9cafe1212c8" on branch "default"
747 747 acl: path access granted: "f9cafe1212c8"
748 748 acl: branch access granted: "911600dab2ae" on branch "default"
749 749 acl: path access granted: "911600dab2ae"
750 updating the branch cache
750 751 listing keys for "phases"
751 752 try to push obsolete markers to remote
752 updating the branch cache
753 753 checking for updated bookmarks
754 754 listing keys for "bookmarks"
755 755 repository tip rolled back to revision 0 (undo push)
756 756 0:6675d58eff77
757 757
758 758
759 759 wilma can change files with a .txt extension
760 760
761 761 $ echo '**/*.txt = wilma' >> $config
762 762 $ do_push wilma
763 763 Pushing as user wilma
764 764 hgrc = """
765 765 [hooks]
766 766 pretxnchangegroup.acl = python:hgext.acl.hook
767 767 [acl]
768 768 sources = push
769 769 [acl.allow]
770 770 foo/** = fred
771 771 [acl.deny]
772 772 foo/bar/** = fred
773 773 foo/Bar/** = fred
774 774 [acl.allow]
775 775 ** = barney
776 776 **/*.txt = wilma
777 777 """
778 778 pushing to ../b
779 779 query 1; heads
780 780 searching for changes
781 781 all remote heads known locally
782 782 invalid branchheads cache (served): tip differs
783 783 listing keys for "bookmarks"
784 784 3 changesets found
785 785 list of changesets:
786 786 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
787 787 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
788 788 911600dab2ae7a9baff75958b84fe606851ce955
789 789 adding changesets
790 790 bundling: 1/3 changesets (33.33%)
791 791 bundling: 2/3 changesets (66.67%)
792 792 bundling: 3/3 changesets (100.00%)
793 793 bundling: 1/3 manifests (33.33%)
794 794 bundling: 2/3 manifests (66.67%)
795 795 bundling: 3/3 manifests (100.00%)
796 796 bundling: foo/Bar/file.txt 1/3 files (33.33%)
797 797 bundling: foo/file.txt 2/3 files (66.67%)
798 798 bundling: quux/file.py 3/3 files (100.00%)
799 799 changesets: 1 chunks
800 800 add changeset ef1ea85a6374
801 801 changesets: 2 chunks
802 802 add changeset f9cafe1212c8
803 803 changesets: 3 chunks
804 804 add changeset 911600dab2ae
805 805 adding manifests
806 806 manifests: 1/3 chunks (33.33%)
807 807 manifests: 2/3 chunks (66.67%)
808 808 manifests: 3/3 chunks (100.00%)
809 809 adding file changes
810 810 adding foo/Bar/file.txt revisions
811 811 files: 1/3 chunks (33.33%)
812 812 adding foo/file.txt revisions
813 813 files: 2/3 chunks (66.67%)
814 814 adding quux/file.py revisions
815 815 files: 3/3 chunks (100.00%)
816 816 added 3 changesets with 3 changes to 3 files
817 817 calling hook pretxnchangegroup.acl: hgext.acl.hook
818 818 acl: checking access for user "wilma"
819 819 acl: acl.allow.branches not enabled
820 820 acl: acl.deny.branches not enabled
821 821 acl: acl.allow enabled, 1 entries for user wilma
822 822 acl: acl.deny enabled, 0 entries for user wilma
823 823 acl: branch access granted: "ef1ea85a6374" on branch "default"
824 824 acl: path access granted: "ef1ea85a6374"
825 825 acl: branch access granted: "f9cafe1212c8" on branch "default"
826 826 acl: path access granted: "f9cafe1212c8"
827 827 acl: branch access granted: "911600dab2ae" on branch "default"
828 828 error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
829 829 transaction abort!
830 830 rollback completed
831 831 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
832 832 no rollback information available
833 833 0:6675d58eff77
834 834
835 835
836 836 file specified by acl.config does not exist
837 837
838 838 $ echo '[acl]' >> $config
839 839 $ echo 'config = ../acl.config' >> $config
840 840 $ do_push barney
841 841 Pushing as user barney
842 842 hgrc = """
843 843 [hooks]
844 844 pretxnchangegroup.acl = python:hgext.acl.hook
845 845 [acl]
846 846 sources = push
847 847 [acl.allow]
848 848 foo/** = fred
849 849 [acl.deny]
850 850 foo/bar/** = fred
851 851 foo/Bar/** = fred
852 852 [acl.allow]
853 853 ** = barney
854 854 **/*.txt = wilma
855 855 [acl]
856 856 config = ../acl.config
857 857 """
858 858 pushing to ../b
859 859 query 1; heads
860 860 searching for changes
861 861 all remote heads known locally
862 862 invalid branchheads cache (served): tip differs
863 863 listing keys for "bookmarks"
864 864 3 changesets found
865 865 list of changesets:
866 866 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
867 867 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
868 868 911600dab2ae7a9baff75958b84fe606851ce955
869 869 adding changesets
870 870 bundling: 1/3 changesets (33.33%)
871 871 bundling: 2/3 changesets (66.67%)
872 872 bundling: 3/3 changesets (100.00%)
873 873 bundling: 1/3 manifests (33.33%)
874 874 bundling: 2/3 manifests (66.67%)
875 875 bundling: 3/3 manifests (100.00%)
876 876 bundling: foo/Bar/file.txt 1/3 files (33.33%)
877 877 bundling: foo/file.txt 2/3 files (66.67%)
878 878 bundling: quux/file.py 3/3 files (100.00%)
879 879 changesets: 1 chunks
880 880 add changeset ef1ea85a6374
881 881 changesets: 2 chunks
882 882 add changeset f9cafe1212c8
883 883 changesets: 3 chunks
884 884 add changeset 911600dab2ae
885 885 adding manifests
886 886 manifests: 1/3 chunks (33.33%)
887 887 manifests: 2/3 chunks (66.67%)
888 888 manifests: 3/3 chunks (100.00%)
889 889 adding file changes
890 890 adding foo/Bar/file.txt revisions
891 891 files: 1/3 chunks (33.33%)
892 892 adding foo/file.txt revisions
893 893 files: 2/3 chunks (66.67%)
894 894 adding quux/file.py revisions
895 895 files: 3/3 chunks (100.00%)
896 896 added 3 changesets with 3 changes to 3 files
897 897 calling hook pretxnchangegroup.acl: hgext.acl.hook
898 898 acl: checking access for user "barney"
899 899 error: pretxnchangegroup.acl hook raised an exception: [Errno *] *: '../acl.config' (glob)
900 900 transaction abort!
901 901 rollback completed
902 902 abort: *: ../acl.config (glob)
903 903 no rollback information available
904 904 0:6675d58eff77
905 905
906 906
907 907 betty is allowed inside foo/ by an acl.config file
908 908
909 909 $ echo '[acl.allow]' >> acl.config
910 910 $ echo 'foo/** = betty' >> acl.config
911 911 $ do_push betty
912 912 Pushing as user betty
913 913 hgrc = """
914 914 [hooks]
915 915 pretxnchangegroup.acl = python:hgext.acl.hook
916 916 [acl]
917 917 sources = push
918 918 [acl.allow]
919 919 foo/** = fred
920 920 [acl.deny]
921 921 foo/bar/** = fred
922 922 foo/Bar/** = fred
923 923 [acl.allow]
924 924 ** = barney
925 925 **/*.txt = wilma
926 926 [acl]
927 927 config = ../acl.config
928 928 """
929 929 acl.config = """
930 930 [acl.allow]
931 931 foo/** = betty
932 932 """
933 933 pushing to ../b
934 934 query 1; heads
935 935 searching for changes
936 936 all remote heads known locally
937 937 invalid branchheads cache (served): tip differs
938 938 listing keys for "bookmarks"
939 939 3 changesets found
940 940 list of changesets:
941 941 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
942 942 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
943 943 911600dab2ae7a9baff75958b84fe606851ce955
944 944 adding changesets
945 945 bundling: 1/3 changesets (33.33%)
946 946 bundling: 2/3 changesets (66.67%)
947 947 bundling: 3/3 changesets (100.00%)
948 948 bundling: 1/3 manifests (33.33%)
949 949 bundling: 2/3 manifests (66.67%)
950 950 bundling: 3/3 manifests (100.00%)
951 951 bundling: foo/Bar/file.txt 1/3 files (33.33%)
952 952 bundling: foo/file.txt 2/3 files (66.67%)
953 953 bundling: quux/file.py 3/3 files (100.00%)
954 954 changesets: 1 chunks
955 955 add changeset ef1ea85a6374
956 956 changesets: 2 chunks
957 957 add changeset f9cafe1212c8
958 958 changesets: 3 chunks
959 959 add changeset 911600dab2ae
960 960 adding manifests
961 961 manifests: 1/3 chunks (33.33%)
962 962 manifests: 2/3 chunks (66.67%)
963 963 manifests: 3/3 chunks (100.00%)
964 964 adding file changes
965 965 adding foo/Bar/file.txt revisions
966 966 files: 1/3 chunks (33.33%)
967 967 adding foo/file.txt revisions
968 968 files: 2/3 chunks (66.67%)
969 969 adding quux/file.py revisions
970 970 files: 3/3 chunks (100.00%)
971 971 added 3 changesets with 3 changes to 3 files
972 972 calling hook pretxnchangegroup.acl: hgext.acl.hook
973 973 acl: checking access for user "betty"
974 974 acl: acl.allow.branches not enabled
975 975 acl: acl.deny.branches not enabled
976 976 acl: acl.allow enabled, 1 entries for user betty
977 977 acl: acl.deny enabled, 0 entries for user betty
978 978 acl: branch access granted: "ef1ea85a6374" on branch "default"
979 979 acl: path access granted: "ef1ea85a6374"
980 980 acl: branch access granted: "f9cafe1212c8" on branch "default"
981 981 acl: path access granted: "f9cafe1212c8"
982 982 acl: branch access granted: "911600dab2ae" on branch "default"
983 983 error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
984 984 transaction abort!
985 985 rollback completed
986 986 abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
987 987 no rollback information available
988 988 0:6675d58eff77
989 989
990 990
991 991 acl.config can set only [acl.allow]/[acl.deny]
992 992
993 993 $ echo '[hooks]' >> acl.config
994 994 $ echo 'changegroup.acl = false' >> acl.config
995 995 $ do_push barney
996 996 Pushing as user barney
997 997 hgrc = """
998 998 [hooks]
999 999 pretxnchangegroup.acl = python:hgext.acl.hook
1000 1000 [acl]
1001 1001 sources = push
1002 1002 [acl.allow]
1003 1003 foo/** = fred
1004 1004 [acl.deny]
1005 1005 foo/bar/** = fred
1006 1006 foo/Bar/** = fred
1007 1007 [acl.allow]
1008 1008 ** = barney
1009 1009 **/*.txt = wilma
1010 1010 [acl]
1011 1011 config = ../acl.config
1012 1012 """
1013 1013 acl.config = """
1014 1014 [acl.allow]
1015 1015 foo/** = betty
1016 1016 [hooks]
1017 1017 changegroup.acl = false
1018 1018 """
1019 1019 pushing to ../b
1020 1020 query 1; heads
1021 1021 searching for changes
1022 1022 all remote heads known locally
1023 1023 invalid branchheads cache (served): tip differs
1024 1024 listing keys for "bookmarks"
1025 1025 3 changesets found
1026 1026 list of changesets:
1027 1027 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1028 1028 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1029 1029 911600dab2ae7a9baff75958b84fe606851ce955
1030 1030 adding changesets
1031 1031 bundling: 1/3 changesets (33.33%)
1032 1032 bundling: 2/3 changesets (66.67%)
1033 1033 bundling: 3/3 changesets (100.00%)
1034 1034 bundling: 1/3 manifests (33.33%)
1035 1035 bundling: 2/3 manifests (66.67%)
1036 1036 bundling: 3/3 manifests (100.00%)
1037 1037 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1038 1038 bundling: foo/file.txt 2/3 files (66.67%)
1039 1039 bundling: quux/file.py 3/3 files (100.00%)
1040 1040 changesets: 1 chunks
1041 1041 add changeset ef1ea85a6374
1042 1042 changesets: 2 chunks
1043 1043 add changeset f9cafe1212c8
1044 1044 changesets: 3 chunks
1045 1045 add changeset 911600dab2ae
1046 1046 adding manifests
1047 1047 manifests: 1/3 chunks (33.33%)
1048 1048 manifests: 2/3 chunks (66.67%)
1049 1049 manifests: 3/3 chunks (100.00%)
1050 1050 adding file changes
1051 1051 adding foo/Bar/file.txt revisions
1052 1052 files: 1/3 chunks (33.33%)
1053 1053 adding foo/file.txt revisions
1054 1054 files: 2/3 chunks (66.67%)
1055 1055 adding quux/file.py revisions
1056 1056 files: 3/3 chunks (100.00%)
1057 1057 added 3 changesets with 3 changes to 3 files
1058 1058 calling hook pretxnchangegroup.acl: hgext.acl.hook
1059 1059 acl: checking access for user "barney"
1060 1060 acl: acl.allow.branches not enabled
1061 1061 acl: acl.deny.branches not enabled
1062 1062 acl: acl.allow enabled, 1 entries for user barney
1063 1063 acl: acl.deny enabled, 0 entries for user barney
1064 1064 acl: branch access granted: "ef1ea85a6374" on branch "default"
1065 1065 acl: path access granted: "ef1ea85a6374"
1066 1066 acl: branch access granted: "f9cafe1212c8" on branch "default"
1067 1067 acl: path access granted: "f9cafe1212c8"
1068 1068 acl: branch access granted: "911600dab2ae" on branch "default"
1069 1069 acl: path access granted: "911600dab2ae"
1070 updating the branch cache
1070 1071 listing keys for "phases"
1071 1072 try to push obsolete markers to remote
1072 updating the branch cache
1073 1073 checking for updated bookmarks
1074 1074 listing keys for "bookmarks"
1075 1075 repository tip rolled back to revision 0 (undo push)
1076 1076 0:6675d58eff77
1077 1077
1078 1078
1079 1079 asterisk
1080 1080
1081 1081 $ init_config
1082 1082
1083 1083 asterisk test
1084 1084
1085 1085 $ echo '[acl.allow]' >> $config
1086 1086 $ echo "** = fred" >> $config
1087 1087
1088 1088 fred is always allowed
1089 1089
1090 1090 $ do_push fred
1091 1091 Pushing as user fred
1092 1092 hgrc = """
1093 1093 [acl]
1094 1094 sources = push
1095 1095 [extensions]
1096 1096 [acl.allow]
1097 1097 ** = fred
1098 1098 """
1099 1099 pushing to ../b
1100 1100 query 1; heads
1101 1101 searching for changes
1102 1102 all remote heads known locally
1103 1103 invalid branchheads cache (served): tip differs
1104 1104 listing keys for "bookmarks"
1105 1105 3 changesets found
1106 1106 list of changesets:
1107 1107 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1108 1108 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1109 1109 911600dab2ae7a9baff75958b84fe606851ce955
1110 1110 adding changesets
1111 1111 bundling: 1/3 changesets (33.33%)
1112 1112 bundling: 2/3 changesets (66.67%)
1113 1113 bundling: 3/3 changesets (100.00%)
1114 1114 bundling: 1/3 manifests (33.33%)
1115 1115 bundling: 2/3 manifests (66.67%)
1116 1116 bundling: 3/3 manifests (100.00%)
1117 1117 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1118 1118 bundling: foo/file.txt 2/3 files (66.67%)
1119 1119 bundling: quux/file.py 3/3 files (100.00%)
1120 1120 changesets: 1 chunks
1121 1121 add changeset ef1ea85a6374
1122 1122 changesets: 2 chunks
1123 1123 add changeset f9cafe1212c8
1124 1124 changesets: 3 chunks
1125 1125 add changeset 911600dab2ae
1126 1126 adding manifests
1127 1127 manifests: 1/3 chunks (33.33%)
1128 1128 manifests: 2/3 chunks (66.67%)
1129 1129 manifests: 3/3 chunks (100.00%)
1130 1130 adding file changes
1131 1131 adding foo/Bar/file.txt revisions
1132 1132 files: 1/3 chunks (33.33%)
1133 1133 adding foo/file.txt revisions
1134 1134 files: 2/3 chunks (66.67%)
1135 1135 adding quux/file.py revisions
1136 1136 files: 3/3 chunks (100.00%)
1137 1137 added 3 changesets with 3 changes to 3 files
1138 1138 calling hook pretxnchangegroup.acl: hgext.acl.hook
1139 1139 acl: checking access for user "fred"
1140 1140 acl: acl.allow.branches not enabled
1141 1141 acl: acl.deny.branches not enabled
1142 1142 acl: acl.allow enabled, 1 entries for user fred
1143 1143 acl: acl.deny not enabled
1144 1144 acl: branch access granted: "ef1ea85a6374" on branch "default"
1145 1145 acl: path access granted: "ef1ea85a6374"
1146 1146 acl: branch access granted: "f9cafe1212c8" on branch "default"
1147 1147 acl: path access granted: "f9cafe1212c8"
1148 1148 acl: branch access granted: "911600dab2ae" on branch "default"
1149 1149 acl: path access granted: "911600dab2ae"
1150 updating the branch cache
1150 1151 listing keys for "phases"
1151 1152 try to push obsolete markers to remote
1152 updating the branch cache
1153 1153 checking for updated bookmarks
1154 1154 listing keys for "bookmarks"
1155 1155 repository tip rolled back to revision 0 (undo push)
1156 1156 0:6675d58eff77
1157 1157
1158 1158
1159 1159 $ echo '[acl.deny]' >> $config
1160 1160 $ echo "foo/Bar/** = *" >> $config
1161 1161
1162 1162 no one is allowed inside foo/Bar/
1163 1163
1164 1164 $ do_push fred
1165 1165 Pushing as user fred
1166 1166 hgrc = """
1167 1167 [acl]
1168 1168 sources = push
1169 1169 [extensions]
1170 1170 [acl.allow]
1171 1171 ** = fred
1172 1172 [acl.deny]
1173 1173 foo/Bar/** = *
1174 1174 """
1175 1175 pushing to ../b
1176 1176 query 1; heads
1177 1177 searching for changes
1178 1178 all remote heads known locally
1179 1179 invalid branchheads cache (served): tip differs
1180 1180 listing keys for "bookmarks"
1181 1181 3 changesets found
1182 1182 list of changesets:
1183 1183 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1184 1184 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1185 1185 911600dab2ae7a9baff75958b84fe606851ce955
1186 1186 adding changesets
1187 1187 bundling: 1/3 changesets (33.33%)
1188 1188 bundling: 2/3 changesets (66.67%)
1189 1189 bundling: 3/3 changesets (100.00%)
1190 1190 bundling: 1/3 manifests (33.33%)
1191 1191 bundling: 2/3 manifests (66.67%)
1192 1192 bundling: 3/3 manifests (100.00%)
1193 1193 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1194 1194 bundling: foo/file.txt 2/3 files (66.67%)
1195 1195 bundling: quux/file.py 3/3 files (100.00%)
1196 1196 changesets: 1 chunks
1197 1197 add changeset ef1ea85a6374
1198 1198 changesets: 2 chunks
1199 1199 add changeset f9cafe1212c8
1200 1200 changesets: 3 chunks
1201 1201 add changeset 911600dab2ae
1202 1202 adding manifests
1203 1203 manifests: 1/3 chunks (33.33%)
1204 1204 manifests: 2/3 chunks (66.67%)
1205 1205 manifests: 3/3 chunks (100.00%)
1206 1206 adding file changes
1207 1207 adding foo/Bar/file.txt revisions
1208 1208 files: 1/3 chunks (33.33%)
1209 1209 adding foo/file.txt revisions
1210 1210 files: 2/3 chunks (66.67%)
1211 1211 adding quux/file.py revisions
1212 1212 files: 3/3 chunks (100.00%)
1213 1213 added 3 changesets with 3 changes to 3 files
1214 1214 calling hook pretxnchangegroup.acl: hgext.acl.hook
1215 1215 acl: checking access for user "fred"
1216 1216 acl: acl.allow.branches not enabled
1217 1217 acl: acl.deny.branches not enabled
1218 1218 acl: acl.allow enabled, 1 entries for user fred
1219 1219 acl: acl.deny enabled, 1 entries for user fred
1220 1220 acl: branch access granted: "ef1ea85a6374" on branch "default"
1221 1221 acl: path access granted: "ef1ea85a6374"
1222 1222 acl: branch access granted: "f9cafe1212c8" on branch "default"
1223 1223 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1224 1224 transaction abort!
1225 1225 rollback completed
1226 1226 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1227 1227 no rollback information available
1228 1228 0:6675d58eff77
1229 1229
1230 1230
1231 1231 Groups
1232 1232
1233 1233 $ init_config
1234 1234
1235 1235 OS-level groups
1236 1236
1237 1237 $ echo '[acl.allow]' >> $config
1238 1238 $ echo "** = @group1" >> $config
1239 1239
1240 1240 @group1 is always allowed
1241 1241
1242 1242 $ do_push fred
1243 1243 Pushing as user fred
1244 1244 hgrc = """
1245 1245 [acl]
1246 1246 sources = push
1247 1247 [extensions]
1248 1248 [acl.allow]
1249 1249 ** = @group1
1250 1250 """
1251 1251 pushing to ../b
1252 1252 query 1; heads
1253 1253 searching for changes
1254 1254 all remote heads known locally
1255 1255 invalid branchheads cache (served): tip differs
1256 1256 listing keys for "bookmarks"
1257 1257 3 changesets found
1258 1258 list of changesets:
1259 1259 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1260 1260 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1261 1261 911600dab2ae7a9baff75958b84fe606851ce955
1262 1262 adding changesets
1263 1263 bundling: 1/3 changesets (33.33%)
1264 1264 bundling: 2/3 changesets (66.67%)
1265 1265 bundling: 3/3 changesets (100.00%)
1266 1266 bundling: 1/3 manifests (33.33%)
1267 1267 bundling: 2/3 manifests (66.67%)
1268 1268 bundling: 3/3 manifests (100.00%)
1269 1269 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1270 1270 bundling: foo/file.txt 2/3 files (66.67%)
1271 1271 bundling: quux/file.py 3/3 files (100.00%)
1272 1272 changesets: 1 chunks
1273 1273 add changeset ef1ea85a6374
1274 1274 changesets: 2 chunks
1275 1275 add changeset f9cafe1212c8
1276 1276 changesets: 3 chunks
1277 1277 add changeset 911600dab2ae
1278 1278 adding manifests
1279 1279 manifests: 1/3 chunks (33.33%)
1280 1280 manifests: 2/3 chunks (66.67%)
1281 1281 manifests: 3/3 chunks (100.00%)
1282 1282 adding file changes
1283 1283 adding foo/Bar/file.txt revisions
1284 1284 files: 1/3 chunks (33.33%)
1285 1285 adding foo/file.txt revisions
1286 1286 files: 2/3 chunks (66.67%)
1287 1287 adding quux/file.py revisions
1288 1288 files: 3/3 chunks (100.00%)
1289 1289 added 3 changesets with 3 changes to 3 files
1290 1290 calling hook pretxnchangegroup.acl: hgext.acl.hook
1291 1291 acl: checking access for user "fred"
1292 1292 acl: acl.allow.branches not enabled
1293 1293 acl: acl.deny.branches not enabled
1294 1294 acl: "group1" not defined in [acl.groups]
1295 1295 acl: acl.allow enabled, 1 entries for user fred
1296 1296 acl: acl.deny not enabled
1297 1297 acl: branch access granted: "ef1ea85a6374" on branch "default"
1298 1298 acl: path access granted: "ef1ea85a6374"
1299 1299 acl: branch access granted: "f9cafe1212c8" on branch "default"
1300 1300 acl: path access granted: "f9cafe1212c8"
1301 1301 acl: branch access granted: "911600dab2ae" on branch "default"
1302 1302 acl: path access granted: "911600dab2ae"
1303 updating the branch cache
1303 1304 listing keys for "phases"
1304 1305 try to push obsolete markers to remote
1305 updating the branch cache
1306 1306 checking for updated bookmarks
1307 1307 listing keys for "bookmarks"
1308 1308 repository tip rolled back to revision 0 (undo push)
1309 1309 0:6675d58eff77
1310 1310
1311 1311
1312 1312 $ echo '[acl.deny]' >> $config
1313 1313 $ echo "foo/Bar/** = @group1" >> $config
1314 1314
1315 1315 @group1 is allowed inside anything but foo/Bar/
1316 1316
1317 1317 $ do_push fred
1318 1318 Pushing as user fred
1319 1319 hgrc = """
1320 1320 [acl]
1321 1321 sources = push
1322 1322 [extensions]
1323 1323 [acl.allow]
1324 1324 ** = @group1
1325 1325 [acl.deny]
1326 1326 foo/Bar/** = @group1
1327 1327 """
1328 1328 pushing to ../b
1329 1329 query 1; heads
1330 1330 searching for changes
1331 1331 all remote heads known locally
1332 1332 invalid branchheads cache (served): tip differs
1333 1333 listing keys for "bookmarks"
1334 1334 3 changesets found
1335 1335 list of changesets:
1336 1336 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1337 1337 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1338 1338 911600dab2ae7a9baff75958b84fe606851ce955
1339 1339 adding changesets
1340 1340 bundling: 1/3 changesets (33.33%)
1341 1341 bundling: 2/3 changesets (66.67%)
1342 1342 bundling: 3/3 changesets (100.00%)
1343 1343 bundling: 1/3 manifests (33.33%)
1344 1344 bundling: 2/3 manifests (66.67%)
1345 1345 bundling: 3/3 manifests (100.00%)
1346 1346 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1347 1347 bundling: foo/file.txt 2/3 files (66.67%)
1348 1348 bundling: quux/file.py 3/3 files (100.00%)
1349 1349 changesets: 1 chunks
1350 1350 add changeset ef1ea85a6374
1351 1351 changesets: 2 chunks
1352 1352 add changeset f9cafe1212c8
1353 1353 changesets: 3 chunks
1354 1354 add changeset 911600dab2ae
1355 1355 adding manifests
1356 1356 manifests: 1/3 chunks (33.33%)
1357 1357 manifests: 2/3 chunks (66.67%)
1358 1358 manifests: 3/3 chunks (100.00%)
1359 1359 adding file changes
1360 1360 adding foo/Bar/file.txt revisions
1361 1361 files: 1/3 chunks (33.33%)
1362 1362 adding foo/file.txt revisions
1363 1363 files: 2/3 chunks (66.67%)
1364 1364 adding quux/file.py revisions
1365 1365 files: 3/3 chunks (100.00%)
1366 1366 added 3 changesets with 3 changes to 3 files
1367 1367 calling hook pretxnchangegroup.acl: hgext.acl.hook
1368 1368 acl: checking access for user "fred"
1369 1369 acl: acl.allow.branches not enabled
1370 1370 acl: acl.deny.branches not enabled
1371 1371 acl: "group1" not defined in [acl.groups]
1372 1372 acl: acl.allow enabled, 1 entries for user fred
1373 1373 acl: "group1" not defined in [acl.groups]
1374 1374 acl: acl.deny enabled, 1 entries for user fred
1375 1375 acl: branch access granted: "ef1ea85a6374" on branch "default"
1376 1376 acl: path access granted: "ef1ea85a6374"
1377 1377 acl: branch access granted: "f9cafe1212c8" on branch "default"
1378 1378 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1379 1379 transaction abort!
1380 1380 rollback completed
1381 1381 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1382 1382 no rollback information available
1383 1383 0:6675d58eff77
1384 1384
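An aside on how these path rules combine, read from the run above rather than
from the extension's source: for each file touched by an incoming changeset, a
matching [acl.deny] pattern rejects the push outright, and once an [acl.allow]
section exists the file must also match an allow pattern; with no allow section
at all, anything not denied passes. A minimal Python sketch of that precedence
(function and parameter names are placeholders, not hgext.acl's API):

# Illustrative sketch of the allow/deny precedence shown above.
def path_permitted(path, deny_match=None, allow_match=None):
    if deny_match is not None and deny_match(path):
        return False              # a matching deny rule always wins
    if allow_match is not None:
        return allow_match(path)  # with an allow list, only matched paths pass
    return True                   # no allow list: anything not denied passes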
1385 1385
1386 1386 Invalid group
1387 1387
1388 1388 Disable the fakegroups trick to get real failures
1389 1389
1390 1390 $ grep -v fakegroups $config > config.tmp
1391 1391 $ mv config.tmp $config
1392 1392 $ echo '[acl.allow]' >> $config
1393 1393 $ echo "** = @unlikelytoexist" >> $config
1394 1394 $ do_push fred 2>&1 | grep unlikelytoexist
1395 1395 ** = @unlikelytoexist
1396 1396 acl: "unlikelytoexist" not defined in [acl.groups]
1397 1397 error: pretxnchangegroup.acl hook failed: group 'unlikelytoexist' is undefined
1398 1398 abort: group 'unlikelytoexist' is undefined
1399 1399
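A note on where group members come from, inferred from the failure above and
offered as an illustration rather than the extension's exact code: a name
written as "@group" is looked up in [acl.groups] first, and only when it is not
declared there does the hook fall back to the operating system's group
database, which is why an undefined group aborts the push once the fakegroups
shim is removed. A rough POSIX-only sketch of that fallback:

import grp

# Hypothetical helper: resolve an OS-level group's member list.
# grp.getgrnam raises KeyError for an unknown group, matching the
# "group 'unlikelytoexist' is undefined" abort above.
def os_group_members(name):
    return set(grp.getgrnam(name).gr_mem)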
1400 1400
1401 1401 Branch acl tests setup
1402 1402
1403 1403 $ init_config
1404 1404 $ cd b
1405 1405 $ hg up
1406 1406 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1407 1407 $ hg branch foobar
1408 1408 marked working directory as branch foobar
1409 1409 (branches are permanent and global, did you want a bookmark?)
1410 1410 $ hg commit -m 'create foobar'
1411 1411 $ echo 'foo contents' > abc.txt
1412 1412 $ hg add abc.txt
1413 1413 $ hg commit -m 'foobar contents'
1414 1414 $ cd ..
1415 1415 $ hg --cwd a pull ../b
1416 1416 pulling from ../b
1417 1417 searching for changes
1418 1418 adding changesets
1419 1419 adding manifests
1420 1420 adding file changes
1421 1421 added 2 changesets with 1 changes to 1 files (+1 heads)
1422 1422 (run 'hg heads' to see heads)
1423 1423
1424 1424 Create additional changeset on foobar branch
1425 1425
1426 1426 $ cd a
1427 1427 $ hg up -C foobar
1428 1428 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
1429 1429 $ echo 'foo contents2' > abc.txt
1430 1430 $ hg commit -m 'foobar contents2'
1431 1431 $ cd ..
1432 1432
1433 1433
1434 1434 No branch acls specified
1435 1435
1436 1436 $ do_push astro
1437 1437 Pushing as user astro
1438 1438 hgrc = """
1439 1439 [acl]
1440 1440 sources = push
1441 1441 [extensions]
1442 1442 """
1443 1443 pushing to ../b
1444 1444 query 1; heads
1445 1445 searching for changes
1446 1446 all remote heads known locally
1447 1447 listing keys for "bookmarks"
1448 1448 4 changesets found
1449 1449 list of changesets:
1450 1450 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1451 1451 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1452 1452 911600dab2ae7a9baff75958b84fe606851ce955
1453 1453 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1454 1454 adding changesets
1455 1455 bundling: 1/4 changesets (25.00%)
1456 1456 bundling: 2/4 changesets (50.00%)
1457 1457 bundling: 3/4 changesets (75.00%)
1458 1458 bundling: 4/4 changesets (100.00%)
1459 1459 bundling: 1/4 manifests (25.00%)
1460 1460 bundling: 2/4 manifests (50.00%)
1461 1461 bundling: 3/4 manifests (75.00%)
1462 1462 bundling: 4/4 manifests (100.00%)
1463 1463 bundling: abc.txt 1/4 files (25.00%)
1464 1464 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1465 1465 bundling: foo/file.txt 3/4 files (75.00%)
1466 1466 bundling: quux/file.py 4/4 files (100.00%)
1467 1467 changesets: 1 chunks
1468 1468 add changeset ef1ea85a6374
1469 1469 changesets: 2 chunks
1470 1470 add changeset f9cafe1212c8
1471 1471 changesets: 3 chunks
1472 1472 add changeset 911600dab2ae
1473 1473 changesets: 4 chunks
1474 1474 add changeset e8fc755d4d82
1475 1475 adding manifests
1476 1476 manifests: 1/4 chunks (25.00%)
1477 1477 manifests: 2/4 chunks (50.00%)
1478 1478 manifests: 3/4 chunks (75.00%)
1479 1479 manifests: 4/4 chunks (100.00%)
1480 1480 adding file changes
1481 1481 adding abc.txt revisions
1482 1482 files: 1/4 chunks (25.00%)
1483 1483 adding foo/Bar/file.txt revisions
1484 1484 files: 2/4 chunks (50.00%)
1485 1485 adding foo/file.txt revisions
1486 1486 files: 3/4 chunks (75.00%)
1487 1487 adding quux/file.py revisions
1488 1488 files: 4/4 chunks (100.00%)
1489 1489 added 4 changesets with 4 changes to 4 files (+1 heads)
1490 1490 calling hook pretxnchangegroup.acl: hgext.acl.hook
1491 1491 acl: checking access for user "astro"
1492 1492 acl: acl.allow.branches not enabled
1493 1493 acl: acl.deny.branches not enabled
1494 1494 acl: acl.allow not enabled
1495 1495 acl: acl.deny not enabled
1496 1496 acl: branch access granted: "ef1ea85a6374" on branch "default"
1497 1497 acl: path access granted: "ef1ea85a6374"
1498 1498 acl: branch access granted: "f9cafe1212c8" on branch "default"
1499 1499 acl: path access granted: "f9cafe1212c8"
1500 1500 acl: branch access granted: "911600dab2ae" on branch "default"
1501 1501 acl: path access granted: "911600dab2ae"
1502 1502 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1503 1503 acl: path access granted: "e8fc755d4d82"
1504 updating the branch cache
1504 1505 listing keys for "phases"
1505 1506 try to push obsolete markers to remote
1506 updating the branch cache
1507 1507 checking for updated bookmarks
1508 1508 listing keys for "bookmarks"
1509 1509 repository tip rolled back to revision 2 (undo push)
1510 1510 2:fb35475503ef
1511 1511
1512 1512
1513 1513 Branch acl deny test
1514 1514
1515 1515 $ echo "[acl.deny.branches]" >> $config
1516 1516 $ echo "foobar = *" >> $config
1517 1517 $ do_push astro
1518 1518 Pushing as user astro
1519 1519 hgrc = """
1520 1520 [acl]
1521 1521 sources = push
1522 1522 [extensions]
1523 1523 [acl.deny.branches]
1524 1524 foobar = *
1525 1525 """
1526 1526 pushing to ../b
1527 1527 query 1; heads
1528 1528 searching for changes
1529 1529 all remote heads known locally
1530 1530 listing keys for "bookmarks"
1531 1531 4 changesets found
1532 1532 list of changesets:
1533 1533 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1534 1534 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1535 1535 911600dab2ae7a9baff75958b84fe606851ce955
1536 1536 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1537 1537 adding changesets
1538 1538 bundling: 1/4 changesets (25.00%)
1539 1539 bundling: 2/4 changesets (50.00%)
1540 1540 bundling: 3/4 changesets (75.00%)
1541 1541 bundling: 4/4 changesets (100.00%)
1542 1542 bundling: 1/4 manifests (25.00%)
1543 1543 bundling: 2/4 manifests (50.00%)
1544 1544 bundling: 3/4 manifests (75.00%)
1545 1545 bundling: 4/4 manifests (100.00%)
1546 1546 bundling: abc.txt 1/4 files (25.00%)
1547 1547 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1548 1548 bundling: foo/file.txt 3/4 files (75.00%)
1549 1549 bundling: quux/file.py 4/4 files (100.00%)
1550 1550 changesets: 1 chunks
1551 1551 add changeset ef1ea85a6374
1552 1552 changesets: 2 chunks
1553 1553 add changeset f9cafe1212c8
1554 1554 changesets: 3 chunks
1555 1555 add changeset 911600dab2ae
1556 1556 changesets: 4 chunks
1557 1557 add changeset e8fc755d4d82
1558 1558 adding manifests
1559 1559 manifests: 1/4 chunks (25.00%)
1560 1560 manifests: 2/4 chunks (50.00%)
1561 1561 manifests: 3/4 chunks (75.00%)
1562 1562 manifests: 4/4 chunks (100.00%)
1563 1563 adding file changes
1564 1564 adding abc.txt revisions
1565 1565 files: 1/4 chunks (25.00%)
1566 1566 adding foo/Bar/file.txt revisions
1567 1567 files: 2/4 chunks (50.00%)
1568 1568 adding foo/file.txt revisions
1569 1569 files: 3/4 chunks (75.00%)
1570 1570 adding quux/file.py revisions
1571 1571 files: 4/4 chunks (100.00%)
1572 1572 added 4 changesets with 4 changes to 4 files (+1 heads)
1573 1573 calling hook pretxnchangegroup.acl: hgext.acl.hook
1574 1574 acl: checking access for user "astro"
1575 1575 acl: acl.allow.branches not enabled
1576 1576 acl: acl.deny.branches enabled, 1 entries for user astro
1577 1577 acl: acl.allow not enabled
1578 1578 acl: acl.deny not enabled
1579 1579 acl: branch access granted: "ef1ea85a6374" on branch "default"
1580 1580 acl: path access granted: "ef1ea85a6374"
1581 1581 acl: branch access granted: "f9cafe1212c8" on branch "default"
1582 1582 acl: path access granted: "f9cafe1212c8"
1583 1583 acl: branch access granted: "911600dab2ae" on branch "default"
1584 1584 acl: path access granted: "911600dab2ae"
1585 1585 error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1586 1586 transaction abort!
1587 1587 rollback completed
1588 1588 abort: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1589 1589 no rollback information available
1590 1590 2:fb35475503ef
1591 1591
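An aside on the branch sections exercised from here on: they use the same
"key = users" shape as the path sections, except the key is a branch name
("*" stands for any branch) and a "*" on the user side stands for any user.
A simplified sketch of that lookup (the dict layout is an assumption made for
illustration, not the extension's data structure):

# Illustrative branch-rule lookup; not hgext.acl's own code.
def branch_rule_matches(rules, branch, user):
    users = rules.get(branch, []) + rules.get('*', [])
    return '*' in users or user in users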
1592 1592
1593 1593 Branch acl empty allow test
1594 1594
1595 1595 $ init_config
1596 1596 $ echo "[acl.allow.branches]" >> $config
1597 1597 $ do_push astro
1598 1598 Pushing as user astro
1599 1599 hgrc = """
1600 1600 [acl]
1601 1601 sources = push
1602 1602 [extensions]
1603 1603 [acl.allow.branches]
1604 1604 """
1605 1605 pushing to ../b
1606 1606 query 1; heads
1607 1607 searching for changes
1608 1608 all remote heads known locally
1609 1609 listing keys for "bookmarks"
1610 1610 4 changesets found
1611 1611 list of changesets:
1612 1612 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1613 1613 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1614 1614 911600dab2ae7a9baff75958b84fe606851ce955
1615 1615 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1616 1616 adding changesets
1617 1617 bundling: 1/4 changesets (25.00%)
1618 1618 bundling: 2/4 changesets (50.00%)
1619 1619 bundling: 3/4 changesets (75.00%)
1620 1620 bundling: 4/4 changesets (100.00%)
1621 1621 bundling: 1/4 manifests (25.00%)
1622 1622 bundling: 2/4 manifests (50.00%)
1623 1623 bundling: 3/4 manifests (75.00%)
1624 1624 bundling: 4/4 manifests (100.00%)
1625 1625 bundling: abc.txt 1/4 files (25.00%)
1626 1626 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1627 1627 bundling: foo/file.txt 3/4 files (75.00%)
1628 1628 bundling: quux/file.py 4/4 files (100.00%)
1629 1629 changesets: 1 chunks
1630 1630 add changeset ef1ea85a6374
1631 1631 changesets: 2 chunks
1632 1632 add changeset f9cafe1212c8
1633 1633 changesets: 3 chunks
1634 1634 add changeset 911600dab2ae
1635 1635 changesets: 4 chunks
1636 1636 add changeset e8fc755d4d82
1637 1637 adding manifests
1638 1638 manifests: 1/4 chunks (25.00%)
1639 1639 manifests: 2/4 chunks (50.00%)
1640 1640 manifests: 3/4 chunks (75.00%)
1641 1641 manifests: 4/4 chunks (100.00%)
1642 1642 adding file changes
1643 1643 adding abc.txt revisions
1644 1644 files: 1/4 chunks (25.00%)
1645 1645 adding foo/Bar/file.txt revisions
1646 1646 files: 2/4 chunks (50.00%)
1647 1647 adding foo/file.txt revisions
1648 1648 files: 3/4 chunks (75.00%)
1649 1649 adding quux/file.py revisions
1650 1650 files: 4/4 chunks (100.00%)
1651 1651 added 4 changesets with 4 changes to 4 files (+1 heads)
1652 1652 calling hook pretxnchangegroup.acl: hgext.acl.hook
1653 1653 acl: checking access for user "astro"
1654 1654 acl: acl.allow.branches enabled, 0 entries for user astro
1655 1655 acl: acl.deny.branches not enabled
1656 1656 acl: acl.allow not enabled
1657 1657 acl: acl.deny not enabled
1658 1658 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1659 1659 transaction abort!
1660 1660 rollback completed
1661 1661 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1662 1662 no rollback information available
1663 1663 2:fb35475503ef
1664 1664
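(The failure above shows that an allow section flips the hook from "deny what
is listed" to "allow only what is listed": with [acl.allow.branches] present
but carrying no entry for the pushing user, every changeset is rejected, even
on the default branch.)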
1665 1665
1666 1666 Branch acl allow other
1667 1667
1668 1668 $ init_config
1669 1669 $ echo "[acl.allow.branches]" >> $config
1670 1670 $ echo "* = george" >> $config
1671 1671 $ do_push astro
1672 1672 Pushing as user astro
1673 1673 hgrc = """
1674 1674 [acl]
1675 1675 sources = push
1676 1676 [extensions]
1677 1677 [acl.allow.branches]
1678 1678 * = george
1679 1679 """
1680 1680 pushing to ../b
1681 1681 query 1; heads
1682 1682 searching for changes
1683 1683 all remote heads known locally
1684 1684 listing keys for "bookmarks"
1685 1685 4 changesets found
1686 1686 list of changesets:
1687 1687 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1688 1688 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1689 1689 911600dab2ae7a9baff75958b84fe606851ce955
1690 1690 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1691 1691 adding changesets
1692 1692 bundling: 1/4 changesets (25.00%)
1693 1693 bundling: 2/4 changesets (50.00%)
1694 1694 bundling: 3/4 changesets (75.00%)
1695 1695 bundling: 4/4 changesets (100.00%)
1696 1696 bundling: 1/4 manifests (25.00%)
1697 1697 bundling: 2/4 manifests (50.00%)
1698 1698 bundling: 3/4 manifests (75.00%)
1699 1699 bundling: 4/4 manifests (100.00%)
1700 1700 bundling: abc.txt 1/4 files (25.00%)
1701 1701 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1702 1702 bundling: foo/file.txt 3/4 files (75.00%)
1703 1703 bundling: quux/file.py 4/4 files (100.00%)
1704 1704 changesets: 1 chunks
1705 1705 add changeset ef1ea85a6374
1706 1706 changesets: 2 chunks
1707 1707 add changeset f9cafe1212c8
1708 1708 changesets: 3 chunks
1709 1709 add changeset 911600dab2ae
1710 1710 changesets: 4 chunks
1711 1711 add changeset e8fc755d4d82
1712 1712 adding manifests
1713 1713 manifests: 1/4 chunks (25.00%)
1714 1714 manifests: 2/4 chunks (50.00%)
1715 1715 manifests: 3/4 chunks (75.00%)
1716 1716 manifests: 4/4 chunks (100.00%)
1717 1717 adding file changes
1718 1718 adding abc.txt revisions
1719 1719 files: 1/4 chunks (25.00%)
1720 1720 adding foo/Bar/file.txt revisions
1721 1721 files: 2/4 chunks (50.00%)
1722 1722 adding foo/file.txt revisions
1723 1723 files: 3/4 chunks (75.00%)
1724 1724 adding quux/file.py revisions
1725 1725 files: 4/4 chunks (100.00%)
1726 1726 added 4 changesets with 4 changes to 4 files (+1 heads)
1727 1727 calling hook pretxnchangegroup.acl: hgext.acl.hook
1728 1728 acl: checking access for user "astro"
1729 1729 acl: acl.allow.branches enabled, 0 entries for user astro
1730 1730 acl: acl.deny.branches not enabled
1731 1731 acl: acl.allow not enabled
1732 1732 acl: acl.deny not enabled
1733 1733 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1734 1734 transaction abort!
1735 1735 rollback completed
1736 1736 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1737 1737 no rollback information available
1738 1738 2:fb35475503ef
1739 1739
1740 1740 $ do_push george
1741 1741 Pushing as user george
1742 1742 hgrc = """
1743 1743 [acl]
1744 1744 sources = push
1745 1745 [extensions]
1746 1746 [acl.allow.branches]
1747 1747 * = george
1748 1748 """
1749 1749 pushing to ../b
1750 1750 query 1; heads
1751 1751 searching for changes
1752 1752 all remote heads known locally
1753 1753 listing keys for "bookmarks"
1754 1754 4 changesets found
1755 1755 list of changesets:
1756 1756 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1757 1757 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1758 1758 911600dab2ae7a9baff75958b84fe606851ce955
1759 1759 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1760 1760 adding changesets
1761 1761 bundling: 1/4 changesets (25.00%)
1762 1762 bundling: 2/4 changesets (50.00%)
1763 1763 bundling: 3/4 changesets (75.00%)
1764 1764 bundling: 4/4 changesets (100.00%)
1765 1765 bundling: 1/4 manifests (25.00%)
1766 1766 bundling: 2/4 manifests (50.00%)
1767 1767 bundling: 3/4 manifests (75.00%)
1768 1768 bundling: 4/4 manifests (100.00%)
1769 1769 bundling: abc.txt 1/4 files (25.00%)
1770 1770 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1771 1771 bundling: foo/file.txt 3/4 files (75.00%)
1772 1772 bundling: quux/file.py 4/4 files (100.00%)
1773 1773 changesets: 1 chunks
1774 1774 add changeset ef1ea85a6374
1775 1775 changesets: 2 chunks
1776 1776 add changeset f9cafe1212c8
1777 1777 changesets: 3 chunks
1778 1778 add changeset 911600dab2ae
1779 1779 changesets: 4 chunks
1780 1780 add changeset e8fc755d4d82
1781 1781 adding manifests
1782 1782 manifests: 1/4 chunks (25.00%)
1783 1783 manifests: 2/4 chunks (50.00%)
1784 1784 manifests: 3/4 chunks (75.00%)
1785 1785 manifests: 4/4 chunks (100.00%)
1786 1786 adding file changes
1787 1787 adding abc.txt revisions
1788 1788 files: 1/4 chunks (25.00%)
1789 1789 adding foo/Bar/file.txt revisions
1790 1790 files: 2/4 chunks (50.00%)
1791 1791 adding foo/file.txt revisions
1792 1792 files: 3/4 chunks (75.00%)
1793 1793 adding quux/file.py revisions
1794 1794 files: 4/4 chunks (100.00%)
1795 1795 added 4 changesets with 4 changes to 4 files (+1 heads)
1796 1796 calling hook pretxnchangegroup.acl: hgext.acl.hook
1797 1797 acl: checking access for user "george"
1798 1798 acl: acl.allow.branches enabled, 1 entries for user george
1799 1799 acl: acl.deny.branches not enabled
1800 1800 acl: acl.allow not enabled
1801 1801 acl: acl.deny not enabled
1802 1802 acl: branch access granted: "ef1ea85a6374" on branch "default"
1803 1803 acl: path access granted: "ef1ea85a6374"
1804 1804 acl: branch access granted: "f9cafe1212c8" on branch "default"
1805 1805 acl: path access granted: "f9cafe1212c8"
1806 1806 acl: branch access granted: "911600dab2ae" on branch "default"
1807 1807 acl: path access granted: "911600dab2ae"
1808 1808 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1809 1809 acl: path access granted: "e8fc755d4d82"
1810 updating the branch cache
1810 1811 listing keys for "phases"
1811 1812 try to push obsolete markers to remote
1812 updating the branch cache
1813 1813 checking for updated bookmarks
1814 1814 listing keys for "bookmarks"
1815 1815 repository tip rolled back to revision 2 (undo push)
1816 1816 2:fb35475503ef
1817 1817
1818 1818
1819 1819 Branch acl conflicting allow
1820 1820 the asterisk ends up applying to all branches, allowing george to
1821 1821 push foobar into the remote
1822 1822
1823 1823 $ init_config
1824 1824 $ echo "[acl.allow.branches]" >> $config
1825 1825 $ echo "foobar = astro" >> $config
1826 1826 $ echo "* = george" >> $config
1827 1827 $ do_push george
1828 1828 Pushing as user george
1829 1829 hgrc = """
1830 1830 [acl]
1831 1831 sources = push
1832 1832 [extensions]
1833 1833 [acl.allow.branches]
1834 1834 foobar = astro
1835 1835 * = george
1836 1836 """
1837 1837 pushing to ../b
1838 1838 query 1; heads
1839 1839 searching for changes
1840 1840 all remote heads known locally
1841 1841 listing keys for "bookmarks"
1842 1842 4 changesets found
1843 1843 list of changesets:
1844 1844 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1845 1845 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1846 1846 911600dab2ae7a9baff75958b84fe606851ce955
1847 1847 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1848 1848 adding changesets
1849 1849 bundling: 1/4 changesets (25.00%)
1850 1850 bundling: 2/4 changesets (50.00%)
1851 1851 bundling: 3/4 changesets (75.00%)
1852 1852 bundling: 4/4 changesets (100.00%)
1853 1853 bundling: 1/4 manifests (25.00%)
1854 1854 bundling: 2/4 manifests (50.00%)
1855 1855 bundling: 3/4 manifests (75.00%)
1856 1856 bundling: 4/4 manifests (100.00%)
1857 1857 bundling: abc.txt 1/4 files (25.00%)
1858 1858 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1859 1859 bundling: foo/file.txt 3/4 files (75.00%)
1860 1860 bundling: quux/file.py 4/4 files (100.00%)
1861 1861 changesets: 1 chunks
1862 1862 add changeset ef1ea85a6374
1863 1863 changesets: 2 chunks
1864 1864 add changeset f9cafe1212c8
1865 1865 changesets: 3 chunks
1866 1866 add changeset 911600dab2ae
1867 1867 changesets: 4 chunks
1868 1868 add changeset e8fc755d4d82
1869 1869 adding manifests
1870 1870 manifests: 1/4 chunks (25.00%)
1871 1871 manifests: 2/4 chunks (50.00%)
1872 1872 manifests: 3/4 chunks (75.00%)
1873 1873 manifests: 4/4 chunks (100.00%)
1874 1874 adding file changes
1875 1875 adding abc.txt revisions
1876 1876 files: 1/4 chunks (25.00%)
1877 1877 adding foo/Bar/file.txt revisions
1878 1878 files: 2/4 chunks (50.00%)
1879 1879 adding foo/file.txt revisions
1880 1880 files: 3/4 chunks (75.00%)
1881 1881 adding quux/file.py revisions
1882 1882 files: 4/4 chunks (100.00%)
1883 1883 added 4 changesets with 4 changes to 4 files (+1 heads)
1884 1884 calling hook pretxnchangegroup.acl: hgext.acl.hook
1885 1885 acl: checking access for user "george"
1886 1886 acl: acl.allow.branches enabled, 1 entries for user george
1887 1887 acl: acl.deny.branches not enabled
1888 1888 acl: acl.allow not enabled
1889 1889 acl: acl.deny not enabled
1890 1890 acl: branch access granted: "ef1ea85a6374" on branch "default"
1891 1891 acl: path access granted: "ef1ea85a6374"
1892 1892 acl: branch access granted: "f9cafe1212c8" on branch "default"
1893 1893 acl: path access granted: "f9cafe1212c8"
1894 1894 acl: branch access granted: "911600dab2ae" on branch "default"
1895 1895 acl: path access granted: "911600dab2ae"
1896 1896 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1897 1897 acl: path access granted: "e8fc755d4d82"
1898 updating the branch cache
1898 1899 listing keys for "phases"
1899 1900 try to push obsolete markers to remote
1900 updating the branch cache
1901 1901 checking for updated bookmarks
1902 1902 listing keys for "bookmarks"
1903 1903 repository tip rolled back to revision 2 (undo push)
1904 1904 2:fb35475503ef
1905 1905
1906 1906 Branch acl conflicting deny
1907 1907
1908 1908 $ init_config
1909 1909 $ echo "[acl.deny.branches]" >> $config
1910 1910 $ echo "foobar = astro" >> $config
1911 1911 $ echo "default = astro" >> $config
1912 1912 $ echo "* = george" >> $config
1913 1913 $ do_push george
1914 1914 Pushing as user george
1915 1915 hgrc = """
1916 1916 [acl]
1917 1917 sources = push
1918 1918 [extensions]
1919 1919 [acl.deny.branches]
1920 1920 foobar = astro
1921 1921 default = astro
1922 1922 * = george
1923 1923 """
1924 1924 pushing to ../b
1925 1925 query 1; heads
1926 1926 searching for changes
1927 1927 all remote heads known locally
1928 1928 listing keys for "bookmarks"
1929 1929 4 changesets found
1930 1930 list of changesets:
1931 1931 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1932 1932 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1933 1933 911600dab2ae7a9baff75958b84fe606851ce955
1934 1934 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1935 1935 adding changesets
1936 1936 bundling: 1/4 changesets (25.00%)
1937 1937 bundling: 2/4 changesets (50.00%)
1938 1938 bundling: 3/4 changesets (75.00%)
1939 1939 bundling: 4/4 changesets (100.00%)
1940 1940 bundling: 1/4 manifests (25.00%)
1941 1941 bundling: 2/4 manifests (50.00%)
1942 1942 bundling: 3/4 manifests (75.00%)
1943 1943 bundling: 4/4 manifests (100.00%)
1944 1944 bundling: abc.txt 1/4 files (25.00%)
1945 1945 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1946 1946 bundling: foo/file.txt 3/4 files (75.00%)
1947 1947 bundling: quux/file.py 4/4 files (100.00%)
1948 1948 changesets: 1 chunks
1949 1949 add changeset ef1ea85a6374
1950 1950 changesets: 2 chunks
1951 1951 add changeset f9cafe1212c8
1952 1952 changesets: 3 chunks
1953 1953 add changeset 911600dab2ae
1954 1954 changesets: 4 chunks
1955 1955 add changeset e8fc755d4d82
1956 1956 adding manifests
1957 1957 manifests: 1/4 chunks (25.00%)
1958 1958 manifests: 2/4 chunks (50.00%)
1959 1959 manifests: 3/4 chunks (75.00%)
1960 1960 manifests: 4/4 chunks (100.00%)
1961 1961 adding file changes
1962 1962 adding abc.txt revisions
1963 1963 files: 1/4 chunks (25.00%)
1964 1964 adding foo/Bar/file.txt revisions
1965 1965 files: 2/4 chunks (50.00%)
1966 1966 adding foo/file.txt revisions
1967 1967 files: 3/4 chunks (75.00%)
1968 1968 adding quux/file.py revisions
1969 1969 files: 4/4 chunks (100.00%)
1970 1970 added 4 changesets with 4 changes to 4 files (+1 heads)
1971 1971 calling hook pretxnchangegroup.acl: hgext.acl.hook
1972 1972 acl: checking access for user "george"
1973 1973 acl: acl.allow.branches not enabled
1974 1974 acl: acl.deny.branches enabled, 1 entries for user george
1975 1975 acl: acl.allow not enabled
1976 1976 acl: acl.deny not enabled
1977 1977 error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
1978 1978 transaction abort!
1979 1979 rollback completed
1980 1980 abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
1981 1981 no rollback information available
1982 1982 2:fb35475503ef
1983 1983
1984 1984 User 'astro' must not be denied
1985 1985
1986 1986 $ init_config
1987 1987 $ echo "[acl.deny.branches]" >> $config
1988 1988 $ echo "default = !astro" >> $config
1989 1989 $ do_push astro
1990 1990 Pushing as user astro
1991 1991 hgrc = """
1992 1992 [acl]
1993 1993 sources = push
1994 1994 [extensions]
1995 1995 [acl.deny.branches]
1996 1996 default = !astro
1997 1997 """
1998 1998 pushing to ../b
1999 1999 query 1; heads
2000 2000 searching for changes
2001 2001 all remote heads known locally
2002 2002 listing keys for "bookmarks"
2003 2003 4 changesets found
2004 2004 list of changesets:
2005 2005 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
2006 2006 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
2007 2007 911600dab2ae7a9baff75958b84fe606851ce955
2008 2008 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
2009 2009 adding changesets
2010 2010 bundling: 1/4 changesets (25.00%)
2011 2011 bundling: 2/4 changesets (50.00%)
2012 2012 bundling: 3/4 changesets (75.00%)
2013 2013 bundling: 4/4 changesets (100.00%)
2014 2014 bundling: 1/4 manifests (25.00%)
2015 2015 bundling: 2/4 manifests (50.00%)
2016 2016 bundling: 3/4 manifests (75.00%)
2017 2017 bundling: 4/4 manifests (100.00%)
2018 2018 bundling: abc.txt 1/4 files (25.00%)
2019 2019 bundling: foo/Bar/file.txt 2/4 files (50.00%)
2020 2020 bundling: foo/file.txt 3/4 files (75.00%)
2021 2021 bundling: quux/file.py 4/4 files (100.00%)
2022 2022 changesets: 1 chunks
2023 2023 add changeset ef1ea85a6374
2024 2024 changesets: 2 chunks
2025 2025 add changeset f9cafe1212c8
2026 2026 changesets: 3 chunks
2027 2027 add changeset 911600dab2ae
2028 2028 changesets: 4 chunks
2029 2029 add changeset e8fc755d4d82
2030 2030 adding manifests
2031 2031 manifests: 1/4 chunks (25.00%)
2032 2032 manifests: 2/4 chunks (50.00%)
2033 2033 manifests: 3/4 chunks (75.00%)
2034 2034 manifests: 4/4 chunks (100.00%)
2035 2035 adding file changes
2036 2036 adding abc.txt revisions
2037 2037 files: 1/4 chunks (25.00%)
2038 2038 adding foo/Bar/file.txt revisions
2039 2039 files: 2/4 chunks (50.00%)
2040 2040 adding foo/file.txt revisions
2041 2041 files: 3/4 chunks (75.00%)
2042 2042 adding quux/file.py revisions
2043 2043 files: 4/4 chunks (100.00%)
2044 2044 added 4 changesets with 4 changes to 4 files (+1 heads)
2045 2045 calling hook pretxnchangegroup.acl: hgext.acl.hook
2046 2046 acl: checking access for user "astro"
2047 2047 acl: acl.allow.branches not enabled
2048 2048 acl: acl.deny.branches enabled, 0 entries for user astro
2049 2049 acl: acl.allow not enabled
2050 2050 acl: acl.deny not enabled
2051 2051 acl: branch access granted: "ef1ea85a6374" on branch "default"
2052 2052 acl: path access granted: "ef1ea85a6374"
2053 2053 acl: branch access granted: "f9cafe1212c8" on branch "default"
2054 2054 acl: path access granted: "f9cafe1212c8"
2055 2055 acl: branch access granted: "911600dab2ae" on branch "default"
2056 2056 acl: path access granted: "911600dab2ae"
2057 2057 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
2058 2058 acl: path access granted: "e8fc755d4d82"
2059 updating the branch cache
2059 2060 listing keys for "phases"
2060 2061 try to push obsolete markers to remote
2061 updating the branch cache
2062 2062 checking for updated bookmarks
2063 2063 listing keys for "bookmarks"
2064 2064 repository tip rolled back to revision 2 (undo push)
2065 2065 2:fb35475503ef
2066 2066
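An aside on the "!astro" entry, offered as a reading of the two runs around it
rather than a quote from the documentation: prefixing a user name with "!"
inverts the rule, so "default = !astro" under [acl.deny.branches] denies the
default branch to everyone except astro, which is why astro's push above
succeeds while george's push below is rejected. A rough sketch of that reading
(names are placeholders):

# Illustrative handling of a single "!user" entry; not hgext.acl's code.
def entry_covers(entry, user):
    if entry.startswith('!'):
        return user != entry[1:]   # everyone except the named user
    return entry in ('*', user)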
2067 2067
2068 2068 Non-astro users must be denied
2069 2069
2070 2070 $ do_push george
2071 2071 Pushing as user george
2072 2072 hgrc = """
2073 2073 [acl]
2074 2074 sources = push
2075 2075 [extensions]
2076 2076 [acl.deny.branches]
2077 2077 default = !astro
2078 2078 """
2079 2079 pushing to ../b
2080 2080 query 1; heads
2081 2081 searching for changes
2082 2082 all remote heads known locally
2083 2083 listing keys for "bookmarks"
2084 2084 4 changesets found
2085 2085 list of changesets:
2086 2086 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
2087 2087 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
2088 2088 911600dab2ae7a9baff75958b84fe606851ce955
2089 2089 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
2090 2090 adding changesets
2091 2091 bundling: 1/4 changesets (25.00%)
2092 2092 bundling: 2/4 changesets (50.00%)
2093 2093 bundling: 3/4 changesets (75.00%)
2094 2094 bundling: 4/4 changesets (100.00%)
2095 2095 bundling: 1/4 manifests (25.00%)
2096 2096 bundling: 2/4 manifests (50.00%)
2097 2097 bundling: 3/4 manifests (75.00%)
2098 2098 bundling: 4/4 manifests (100.00%)
2099 2099 bundling: abc.txt 1/4 files (25.00%)
2100 2100 bundling: foo/Bar/file.txt 2/4 files (50.00%)
2101 2101 bundling: foo/file.txt 3/4 files (75.00%)
2102 2102 bundling: quux/file.py 4/4 files (100.00%)
2103 2103 changesets: 1 chunks
2104 2104 add changeset ef1ea85a6374
2105 2105 changesets: 2 chunks
2106 2106 add changeset f9cafe1212c8
2107 2107 changesets: 3 chunks
2108 2108 add changeset 911600dab2ae
2109 2109 changesets: 4 chunks
2110 2110 add changeset e8fc755d4d82
2111 2111 adding manifests
2112 2112 manifests: 1/4 chunks (25.00%)
2113 2113 manifests: 2/4 chunks (50.00%)
2114 2114 manifests: 3/4 chunks (75.00%)
2115 2115 manifests: 4/4 chunks (100.00%)
2116 2116 adding file changes
2117 2117 adding abc.txt revisions
2118 2118 files: 1/4 chunks (25.00%)
2119 2119 adding foo/Bar/file.txt revisions
2120 2120 files: 2/4 chunks (50.00%)
2121 2121 adding foo/file.txt revisions
2122 2122 files: 3/4 chunks (75.00%)
2123 2123 adding quux/file.py revisions
2124 2124 files: 4/4 chunks (100.00%)
2125 2125 added 4 changesets with 4 changes to 4 files (+1 heads)
2126 2126 calling hook pretxnchangegroup.acl: hgext.acl.hook
2127 2127 acl: checking access for user "george"
2128 2128 acl: acl.allow.branches not enabled
2129 2129 acl: acl.deny.branches enabled, 1 entries for user george
2130 2130 acl: acl.allow not enabled
2131 2131 acl: acl.deny not enabled
2132 2132 error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
2133 2133 transaction abort!
2134 2134 rollback completed
2135 2135 abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
2136 2136 no rollback information available
2137 2137 2:fb35475503ef
2138 2138
2139 2139
@@ -1,41 +1,42 b''
1 1 #if unix-permissions no-root no-windows
2 2
3 3 Prepare
4 4
5 5 $ hg init a
6 6 $ echo a > a/a
7 7 $ hg -R a ci -A -m a
8 8 adding a
9 9
10 10 $ hg clone a b
11 11 updating to branch default
12 12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
13 13
14 14 One process waiting for another
15 15
16 16 $ cat > hooks.py << EOF
17 17 > import time
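> # sleepone keeps the background commit, and the lock it holds, busy for a
> # second; sleephalf delays the foreground update just long enough for the
> # commit to take that lock first, so the update below has to wait for it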
18 18 > def sleepone(**x): time.sleep(1)
19 19 > def sleephalf(**x): time.sleep(0.5)
20 20 > EOF
21 21 $ echo b > b/b
22 22 $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
23 23 $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf"
24 24 waiting for lock on working directory of b held by '*:*' (glob)
25 25 got lock after ? seconds (glob)
26 26 warning: ignoring unknown working parent d2ae7f538514!
27 27 $ wait
28 28 $ cat stdout
29 29 adding b
30 30
31 31 Pushing to a local read-only repo that can't be locked
32 32
33 33 $ chmod 100 a/.hg/store
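(Mode 100 is owner execute only, so the store directory can be traversed but
nothing can be created in it; the push below therefore fails to take the lock
with a permission error, and the chmod 700 at the end restores the owner's
read/write/execute bits.)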
34 34
35 35 $ hg -R b push a
36 36 pushing to a
37 searching for changes
37 38 abort: could not lock repository a: Permission denied
38 39 [255]
39 40
40 41 $ chmod 700 a/.hg/store
41 42 #endif