localrepo: add unbundle support...
Pierre-Yves David
r20969:7a679918 default
@@ -1,656 +1,649 @@
# exchange.py - utility to exchange data between repos.
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import sys
from i18n import _
from node import hex, nullid
import cStringIO
import errno
import util, scmutil, changegroup, base85
import discovery, phases, obsolete, bookmarks, bundle2


class pushoperation(object):
    """An object that represents a single push operation

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None

def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                _pushchangeset(pushop)
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    _pushbookmark(pushop)
    return pushop.ret

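As a reading aid, here is a small standalone sketch (not part of the change) of how a caller might interpret the integer contract documented in push() above; the helper name describe_push_result is hypothetical.

def describe_push_result(result):
    # Mirrors the return-value contract documented in push()'s docstring.
    if result is None:
        return 'nothing to push'
    if result == 0:
        return 'HTTP error'
    if result == 1:
        return 'remote head count unchanged, or outgoing changesets were refused'
    return 'addchangegroup() result: %r' % (result,)

assert describe_push_result(None) == 'nothing to push'
assert describe_push_result(1).startswith('remote head count unchanged')
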
def _pushdiscovery(pushop):
    # discovery
    unfi = pushop.repo.unfiltered()
    fci = discovery.findcommonincoming
    commoninc = fci(unfi, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc

def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are here for the 80-char limit
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for the i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If there is at least one obsolete or unstable
            # changeset in missing, at least one of the
            # missing heads will be obsolete or unstable.
            # So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True

def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            'push')
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push',
                                                  pushop.repo.url())

def _pushcomputecommonheads(pushop):
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeeded, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All-out push failed. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    if pushop.ret:
        # push succeeded, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All-out push failed. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote supports phases
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only repo
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote but public here.
        # XXX Beware that the revset breaks if droots is not strictly
        # XXX roots; we may want to ensure it is, but that is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)

    for b, scid, dcid in advsrc:
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)

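A minimal standalone illustration (assumed names, not Mercurial's bookmarks.compare API) of the compare-and-swap style update that the pushkey('bookmarks', b, dcid, scid) call above performs: a bookmark only moves on the remote if the remote still holds the old value.

def advance_bookmarks(local, remote, cas_update):
    # local/remote: {name: node-hex}; cas_update(name, old, new) -> bool
    updated, failed = [], []
    for name, new in sorted(local.items()):
        old = remote.get(name, '')
        if old == new:
            continue
        if cas_update(name, old, new):
            updated.append(name)
        else:
            failed.append(name)
    return updated, failed

store = {'stable': 'aaa'}
def cas(name, old, new):
    if store.get(name, '') != old:
        return False            # someone moved the bookmark meanwhile
    store[name] = new
    return True

assert advance_bookmarks({'stable': 'bbb'}, {'stable': 'aaa'}, cas) == (['stable'], [])
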
class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of steps remaining to do (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

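The gettransaction/closetransaction/releasetransaction trio above implements a lazy transaction: nothing is opened until a step actually needs to write. A standalone sketch of the same shape (names assumed, no Mercurial APIs):

class lazytransaction(object):
    def __init__(self, opener):
        self._opener = opener   # callable that really opens a transaction
        self._tr = None
    def get(self):
        if self._tr is None:    # open on first use only
            self._tr = self._opener()
        return self._tr
    def close(self):
        if self._tr is not None:
            self._tr.close()    # commit, only if something was opened
    def release(self):
        if self._tr is not None:
            self._tr.release()  # abort unless close() already happened
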
def pull(repo, remote, heads=None, force=False):
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        if pullop.remote.capable('bundle2'):
            _pullbundle2(pullop)
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult

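pull() drives its helpers through the todosteps set: a bundle2-capable exchange can claim steps up front, and the plain code paths only run for whatever remains. A toy sketch of that bookkeeping (assumed names, not the real helpers):

todosteps = set(['changegroup', 'phases', 'obsmarkers'])

def fake_pullbundle2(steps):
    # pretend bundle2 already transferred the changegroup
    steps.remove('changegroup')

fake_pullbundle2(todosteps)
remaining = [s for s in ('changegroup', 'phases', 'obsmarkers') if s in todosteps]
assert remaining == ['phases', 'obsmarkers']
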
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    pullop.common, pullop.fetch, pullop.rheads = tmp

def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroups."""
    kwargs = {'bundlecaps': set(['HG20'])}
    # pulling changegroup
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        kwargs['common'] = pullop.common
        kwargs['heads'] = pullop.heads or pullop.rheads
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except KeyError, exc:
        raise util.Abort('missing support for %s' % exc)
    assert len(op.records['changegroup']) == 1
    pullop.cgresult = op.records['changegroup'][0]['return']

def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay opening the transaction as late as possible so we don't
    # open a transaction for nothing, which would break future useful
    # rollback calls
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())

def _pullphase(pullop):
    # Get remote phases data from remote
    pullop.todosteps.remove('phases')
    remotephases = pullop.remote.listkeys('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and unpublishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        phases.advanceboundary(pullop.repo, phases.public, pheads)
        phases.advanceboundary(pullop.repo, phases.draft,
                               pullop.pulledsubset)
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)

def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    `gettransaction` is a function that returns the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes"""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr

def getbundle(repo, source, heads=None, common=None, bundlecaps=None):
    """return a full bundle (with potentially multiple kinds of parts)

    Could be a bundle HG10 or a bundle HG20 depending on the bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types become available for bundle2.

    This is different from changegroup.getbundle, which only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we want to use to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG20' not in bundlecaps:
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    bundler = bundle2.bundle20(repo.ui)
    tempname = changegroup.writebundle(cg, None, 'HG10UN')
    data = open(tempname).read()
    part = bundle2.part('changegroup', data=data)
    bundler.addpart(part)
    temp = cStringIO.StringIO()
    for c in bundler.getchunks():
        temp.write(c)
    temp.seek(0)
    return bundle2.unbundle20(repo.ui, temp)

class PushRaced(RuntimeError):
    """An exception raised during unbundling that indicates a push race"""

def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise PushRaced('repository changed while %s - '
                        'please try again' % context)

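check_heads() compares the heads the client saw against the server's current heads, either directly or through a digest of the sorted binary node ids. A standalone sketch of that digest using hashlib (the real code goes through util.sha1; the sample node ids below are made up):

import hashlib

def hash_heads(heads):
    # heads are 20-byte binary node ids, as returned by repo.heads()
    return hashlib.sha1(b''.join(sorted(heads))).digest()

before = [b'\x11' * 20, b'\x22' * 20]
after = before + [b'\x33' * 20]      # someone pushed in between
assert hash_heads(before) != hash_heads(after)
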
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and has
    a mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
-        try:
-            r = changegroup.addchangegroup(repo, cg, source, url)
-        except util.Abort, inst:
-            # The old code we moved used sys.stderr directly.
-            # We did not changed it to minise code change.
-            # This need to be moved to something proper.
-            # Feel free to do it.
-            sys.stderr.write("abort: %s\n" % inst)
+        r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        lock.release()
    return r
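A hedged sketch of the lock / check / apply pattern unbundle() uses, with the race surfaced as an exception the transport layer can report back; acquire_lock, current_heads and apply_bundle are stand-ins, not Mercurial APIs.

def guarded_apply(acquire_lock, current_heads, expected_heads, apply_bundle):
    lock = acquire_lock()
    try:
        if expected_heads != ['force'] and current_heads() != expected_heads:
            # someone else committed/pushed/unbundled while data was in flight
            raise PushRaced('repository changed while uploading changes - '
                            'please try again')
        return apply_bundle()
    finally:
        lock.release()
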
@@ -1,1876 +1,1885 @@
# localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import hex, nullid, short
from i18n import _
import peer, changegroup, subrepo, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock as lockmod
import transaction, store, encoding, exchange
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
import branchmap, pathutil
propertycache = util.propertycache
filecache = scmutil.filecache

class repofilecache(filecache):
    """All filecache usage on a repo is done for logic that should be unfiltered
    """

    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())

class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        return obj.sjoin(fname)

class unfilteredpropertycache(propertycache):
    """propertycache that applies to the unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            return super(unfilteredpropertycache, self).__get__(unfi)
        return getattr(unfi, self.name)

class filteredpropertycache(propertycache):
    """propertycache that must take filtering into account"""

    def cachevalue(self, obj, value):
        object.__setattr__(obj, self.name, value)


def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    return name in vars(repo.unfiltered())

def unfilteredmethod(orig):
    """decorate a method that always needs to be run on the unfiltered version"""
    def wrapper(repo, *args, **kwargs):
        return orig(repo.unfiltered(), *args, **kwargs)
    return wrapper

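unfilteredmethod simply swaps the receiver for its unfiltered view before calling the wrapped function. A self-contained toy with the same shape (fakerepo and on_unfiltered are invented for illustration):

class fakerepo(object):
    def __init__(self, unfiltered=None):
        self._unfiltered = unfiltered or self
    def unfiltered(self):
        return self._unfiltered

def on_unfiltered(orig):
    def wrapper(repo, *args, **kwargs):
        return orig(repo.unfiltered(), *args, **kwargs)
    return wrapper

@on_unfiltered
def whoami(repo):
    return repo

base = fakerepo()
view = fakerepo(unfiltered=base)
assert whoami(view) is base          # the wrapper redirected to the base repo
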
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
-                  'bundle2'))
+                  'bundle2', 'unbundle'))
legacycaps = moderncaps.union(set(['changegroupsubset']))

class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=moderncaps):
        peer.peerrepository.__init__(self)
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  format='HG10'):
        return exchange.getbundle(self._repo, source, heads=heads,
                                  common=common, bundlecaps=bundlecaps)

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

+    def unbundle(self, cg, heads, url):
+        """apply a bundle on a repo
+
+        This function handles the repo locking itself."""
+        try:
+            return exchange.unbundle(self._repo, cg, heads, 'push', url)
+        except exchange.PushRaced, exc:
+            raise error.ResponseError(_('push failed:'), exc.message)
+
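This new method is what lets the push path in exchange._pushchangeset (shown in the first hunk) take the unbundle branch for local peers now that 'unbundle' is advertised in moderncaps. A condensed sketch of that dispatch, using only calls that appear above (the function name apply_to_remote is hypothetical):

def apply_to_remote(remote, cg, remoteheads, url):
    # mirrors exchange._pushchangeset: prefer unbundle when advertised,
    # otherwise fall back to addchangegroup on a remote the pusher can lock
    if remote.capable('unbundle'):
        return remote.unbundle(cg, remoteheads, 'push')
    return remote.addchangegroup(cg, 'push', url)
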
    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return changegroup.addchangegroup(self._repo, cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)

class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        localpeer.__init__(self, repo, caps=legacycaps)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return changegroup.changegroup(self._repo, basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return changegroup.changegroupsubset(self._repo, bases, heads, source)

158
150 class localrepository(object):
159 class localrepository(object):
151
160
152 supportedformats = set(('revlogv1', 'generaldelta'))
161 supportedformats = set(('revlogv1', 'generaldelta'))
153 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
162 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
154 'dotencode'))
163 'dotencode'))
155 openerreqs = set(('revlogv1', 'generaldelta'))
164 openerreqs = set(('revlogv1', 'generaldelta'))
156 requirements = ['revlogv1']
165 requirements = ['revlogv1']
157 filtername = None
166 filtername = None
158
167
159 # a list of (ui, featureset) functions.
168 # a list of (ui, featureset) functions.
160 # only functions defined in module of enabled extensions are invoked
169 # only functions defined in module of enabled extensions are invoked
161 featuresetupfuncs = set()
170 featuresetupfuncs = set()
162
171
163 def _baserequirements(self, create):
172 def _baserequirements(self, create):
164 return self.requirements[:]
173 return self.requirements[:]
165
174
166 def __init__(self, baseui, path=None, create=False):
175 def __init__(self, baseui, path=None, create=False):
167 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
176 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
168 self.wopener = self.wvfs
177 self.wopener = self.wvfs
169 self.root = self.wvfs.base
178 self.root = self.wvfs.base
170 self.path = self.wvfs.join(".hg")
179 self.path = self.wvfs.join(".hg")
171 self.origroot = path
180 self.origroot = path
172 self.auditor = pathutil.pathauditor(self.root, self._checknested)
181 self.auditor = pathutil.pathauditor(self.root, self._checknested)
173 self.vfs = scmutil.vfs(self.path)
182 self.vfs = scmutil.vfs(self.path)
174 self.opener = self.vfs
183 self.opener = self.vfs
175 self.baseui = baseui
184 self.baseui = baseui
176 self.ui = baseui.copy()
185 self.ui = baseui.copy()
177 self.ui.copy = baseui.copy # prevent copying repo configuration
186 self.ui.copy = baseui.copy # prevent copying repo configuration
178 # A list of callback to shape the phase if no data were found.
187 # A list of callback to shape the phase if no data were found.
179 # Callback are in the form: func(repo, roots) --> processed root.
188 # Callback are in the form: func(repo, roots) --> processed root.
180 # This list it to be filled by extension during repo setup
189 # This list it to be filled by extension during repo setup
181 self._phasedefaults = []
190 self._phasedefaults = []
182 try:
191 try:
183 self.ui.readconfig(self.join("hgrc"), self.root)
192 self.ui.readconfig(self.join("hgrc"), self.root)
184 extensions.loadall(self.ui)
193 extensions.loadall(self.ui)
185 except IOError:
194 except IOError:
186 pass
195 pass
187
196
188 if self.featuresetupfuncs:
197 if self.featuresetupfuncs:
189 self.supported = set(self._basesupported) # use private copy
198 self.supported = set(self._basesupported) # use private copy
190 extmods = set(m.__name__ for n, m
199 extmods = set(m.__name__ for n, m
191 in extensions.extensions(self.ui))
200 in extensions.extensions(self.ui))
192 for setupfunc in self.featuresetupfuncs:
201 for setupfunc in self.featuresetupfuncs:
193 if setupfunc.__module__ in extmods:
202 if setupfunc.__module__ in extmods:
194 setupfunc(self.ui, self.supported)
203 setupfunc(self.ui, self.supported)
195 else:
204 else:
196 self.supported = self._basesupported
205 self.supported = self._basesupported
197
206
198 if not self.vfs.isdir():
207 if not self.vfs.isdir():
199 if create:
208 if create:
200 if not self.wvfs.exists():
209 if not self.wvfs.exists():
201 self.wvfs.makedirs()
210 self.wvfs.makedirs()
202 self.vfs.makedir(notindexed=True)
211 self.vfs.makedir(notindexed=True)
203 requirements = self._baserequirements(create)
212 requirements = self._baserequirements(create)
204 if self.ui.configbool('format', 'usestore', True):
213 if self.ui.configbool('format', 'usestore', True):
205 self.vfs.mkdir("store")
214 self.vfs.mkdir("store")
206 requirements.append("store")
215 requirements.append("store")
207 if self.ui.configbool('format', 'usefncache', True):
216 if self.ui.configbool('format', 'usefncache', True):
208 requirements.append("fncache")
217 requirements.append("fncache")
209 if self.ui.configbool('format', 'dotencode', True):
218 if self.ui.configbool('format', 'dotencode', True):
210 requirements.append('dotencode')
219 requirements.append('dotencode')
211 # create an invalid changelog
220 # create an invalid changelog
212 self.vfs.append(
221 self.vfs.append(
213 "00changelog.i",
222 "00changelog.i",
214 '\0\0\0\2' # represents revlogv2
223 '\0\0\0\2' # represents revlogv2
215 ' dummy changelog to prevent using the old repo layout'
224 ' dummy changelog to prevent using the old repo layout'
216 )
225 )
217 if self.ui.configbool('format', 'generaldelta', False):
226 if self.ui.configbool('format', 'generaldelta', False):
218 requirements.append("generaldelta")
227 requirements.append("generaldelta")
219 requirements = set(requirements)
228 requirements = set(requirements)
220 else:
229 else:
221 raise error.RepoError(_("repository %s not found") % path)
230 raise error.RepoError(_("repository %s not found") % path)
222 elif create:
231 elif create:
223 raise error.RepoError(_("repository %s already exists") % path)
232 raise error.RepoError(_("repository %s already exists") % path)
224 else:
233 else:
225 try:
234 try:
226 requirements = scmutil.readrequires(self.vfs, self.supported)
235 requirements = scmutil.readrequires(self.vfs, self.supported)
227 except IOError, inst:
236 except IOError, inst:
228 if inst.errno != errno.ENOENT:
237 if inst.errno != errno.ENOENT:
229 raise
238 raise
230 requirements = set()
239 requirements = set()
231
240
232 self.sharedpath = self.path
241 self.sharedpath = self.path
233 try:
242 try:
234 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
243 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
235 realpath=True)
244 realpath=True)
236 s = vfs.base
245 s = vfs.base
237 if not vfs.exists():
246 if not vfs.exists():
238 raise error.RepoError(
247 raise error.RepoError(
239 _('.hg/sharedpath points to nonexistent directory %s') % s)
248 _('.hg/sharedpath points to nonexistent directory %s') % s)
240 self.sharedpath = s
249 self.sharedpath = s
241 except IOError, inst:
250 except IOError, inst:
242 if inst.errno != errno.ENOENT:
251 if inst.errno != errno.ENOENT:
243 raise
252 raise
244
253
245 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
254 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
246 self.spath = self.store.path
255 self.spath = self.store.path
247 self.svfs = self.store.vfs
256 self.svfs = self.store.vfs
248 self.sopener = self.svfs
257 self.sopener = self.svfs
249 self.sjoin = self.store.join
258 self.sjoin = self.store.join
250 self.vfs.createmode = self.store.createmode
259 self.vfs.createmode = self.store.createmode
251 self._applyrequirements(requirements)
260 self._applyrequirements(requirements)
252 if create:
261 if create:
253 self._writerequirements()
262 self._writerequirements()
254
263
255
264
256 self._branchcaches = {}
265 self._branchcaches = {}
257 self.filterpats = {}
266 self.filterpats = {}
258 self._datafilters = {}
267 self._datafilters = {}
259 self._transref = self._lockref = self._wlockref = None
268 self._transref = self._lockref = self._wlockref = None
260
269
261 # A cache for various files under .hg/ that tracks file changes,
270 # A cache for various files under .hg/ that tracks file changes,
262 # (used by the filecache decorator)
271 # (used by the filecache decorator)
263 #
272 #
264 # Maps a property name to its util.filecacheentry
273 # Maps a property name to its util.filecacheentry
265 self._filecache = {}
274 self._filecache = {}
266
275
267 # hold sets of revision to be filtered
276 # hold sets of revision to be filtered
268 # should be cleared when something might have changed the filter value:
277 # should be cleared when something might have changed the filter value:
269 # - new changesets,
278 # - new changesets,
270 # - phase change,
279 # - phase change,
271 # - new obsolescence marker,
280 # - new obsolescence marker,
272 # - working directory parent change,
281 # - working directory parent change,
273 # - bookmark changes
282 # - bookmark changes
274 self.filteredrevcache = {}
283 self.filteredrevcache = {}
275
284
276 def close(self):
285 def close(self):
277 pass
286 pass
278
287
279 def _restrictcapabilities(self, caps):
288 def _restrictcapabilities(self, caps):
280 # bundle2 is not ready for prime time, drop it unless explicitly
289 # bundle2 is not ready for prime time, drop it unless explicitly
281 # required by the tests (or some brave tester)
290 # required by the tests (or some brave tester)
282 if not self.ui.configbool('server', 'bundle2', False):
291 if not self.ui.configbool('server', 'bundle2', False):
283 caps = set(caps)
292 caps = set(caps)
284 caps.discard('bundle2')
293 caps.discard('bundle2')
285 return caps
294 return caps
286
295
287 def _applyrequirements(self, requirements):
296 def _applyrequirements(self, requirements):
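        # Remember the requirements and expose the subset the store opener
        # understands (openerreqs) as opener options; also honour the
        # format.chunkcachesize setting when it is present.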
        self.requirements = requirements
        self.sopener.options = dict((r, 1) for r in requirements
                                    if r in self.openerreqs)
        chunkcachesize = self.ui.configint('format', 'chunkcachesize')
        if chunkcachesize is not None:
            self.sopener.options['chunkcachesize'] = chunkcachesize

    def _writerequirements(self):
        reqfile = self.opener("requires", "w")
        for r in sorted(self.requirements):
            reqfile.write("%s\n" % r)
        reqfile.close()

    def _checknested(self, path):
        """Determine if path is a legal nested repository."""
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    return True
                else:
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                parts.pop()
        return False

    def peer(self):
        return localpeer(self) # not cached to avoid reference cycle

    def unfiltered(self):
        """Return unfiltered version of the repository

        Intended to be overwritten by filtered repo."""
        return self

    def filtered(self, name):
        """Return a filtered version of a repository"""
        # build a new class with the mixin and the current class
        # (possibly subclass of the repo)
        class proxycls(repoview.repoview, self.unfiltered().__class__):
            pass
        return proxycls(self, name)

    @repofilecache('bookmarks')
    def _bookmarks(self):
        return bookmarks.bmstore(self)

    @repofilecache('bookmarks.current')
    def _bookmarkcurrent(self):
        return bookmarks.readcurrent(self)

    def bookmarkheads(self, bookmark):
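        # Collect the nodes of every bookmark sharing this bookmark's base
        # name; divergent bookmarks carry an '@suffix', hence the split('@').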
        name = bookmark.split('@', 1)[0]
        heads = []
        for mark, n in self._bookmarks.iteritems():
            if mark.split('@', 1)[0] == name:
                heads.append(n)
        return heads

    @storecache('phaseroots')
    def _phasecache(self):
        return phases.phasecache(self, self._phasedefaults)

    @storecache('obsstore')
    def obsstore(self):
        store = obsolete.obsstore(self.sopener)
        if store and not obsolete._enabled:
            # message is rare enough to not be translated
            msg = 'obsolete feature not enabled but %i markers found!\n'
            self.ui.warn(msg % len(list(store)))
        return store

    @storecache('00changelog.i')
    def changelog(self):
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c

    @storecache('00manifest.i')
    def manifest(self):
        return manifest.manifest(self.sopener)

    @repofilecache('dirstate')
    def dirstate(self):
        warned = [0]
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.opener, self.ui, self.root, validate)

    def __getitem__(self, changeid):
        if changeid is None:
            return context.workingctx(self)
        return context.changectx(self, changeid)

    def __contains__(self, changeid):
        try:
            return bool(self.lookup(changeid))
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        return True

    def __len__(self):
        return len(self.changelog)

    def __iter__(self):
        return iter(self.changelog)

    def revs(self, expr, *args):
        '''Return a list of revisions matching the given revset'''
        expr = revset.formatspec(expr, *args)
        m = revset.match(None, expr)
        return m(self, revset.spanset(self))

    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]

    def url(self):
        return 'file:' + self.root

    def hook(self, name, throw=False, **args):
        return hook.hook(self.ui, self, name, throw, **args)

    @unfilteredmethod
    def _tag(self, names, node, message, local, user, date, extra={}):
        if isinstance(names, str):
            names = (names,)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if (self._tagscache.tagtypes and
                        name in self._tagscache.tagtypes):
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode

    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            for x in self.status()[:5]:
                if '.hgtags' in x:
                    raise util.Abort(_('working copy of .hgtags is changed '
                                       '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date)

    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache

    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        for k, v in tags.iteritems():
            try:
                # ignore tags to unknown nodes
                self.changelog.rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t

    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)

    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        if not self._tagscache.tagslist:
            l = []
            for t, n in self.tags().iteritems():
                r = self.changelog.rev(n)
                l.append((r, t, n))
            self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

        return self._tagscache.tagslist

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            nodetagscache = {}
            for t, n in self._tagscache.tags.iteritems():
                nodetagscache.setdefault(n, []).append(t)
            for tags in nodetagscache.itervalues():
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

    def nodebookmarks(self, node):
        marks = []
        for bookmark, n in self._bookmarks.iteritems():
            if n == node:
                marks.append(bookmark)
        return sorted(marks)

    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        branchmap.updatecache(self)
        return self._branchcaches[self.filtername]

    def branchtip(self, branch):
        '''return the tip node for a given branch'''
        try:
            return self.branchmap().branchtip(branch)
        except KeyError:
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)

    def lookup(self, key):
        return self[key].node()

    def lookupbranch(self, key, remote=None):
        repo = remote or self
        if key in repo.branchmap():
            return key

        repo = (remote and remote.local()) and remote or self
        return repo[key].branch()

    def known(self, nodes):
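        # For each node, report whether it is present locally and not secret;
        # secret changesets are kept out of discovery with remote peers.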
        nm = self.changelog.nodemap
        pc = self._phasecache
        result = []
        for n in nodes:
            r = nm.get(n)
            resp = not (r is None or pc.phase(self, r) >= phases.secret)
            result.append(resp)
        return result

    def local(self):
        return self

    def cancopy(self):
        # so statichttprepo's override of local() works
        if not self.local():
            return False
        if not self.ui.configbool('phases', 'publish', True):
            return True
        # if publishing we can't copy if there is filtered content
        return not self.filtered('visible').changelog.filteredrevs

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid):
        return self[changeid]

    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()

    def setparents(self, p1, p2=nullid):
        copies = self.dirstate.setparents(p1, p2)
        pctx = self[p1]
        if copies:
            # Adjust copy records; the dirstate cannot do it, as it
            # requires access to the parents' manifests. Preserve them
            # only for entries added to the first parent.
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def _link(self, f):
        return self.wvfs.islink(f)

    def _loadfilter(self, filter):
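        # Parse the config section named by 'filter' (e.g. [encode]/[decode])
        # into (matcher, filterfn, params) triples, caching the result.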
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]

    def _filter(self, filterpats, filename, data):
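        # Run the data through the first configured filter whose pattern
        # matches the given filename.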
        for mf, fn, cmd in filterpats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data

    @unfilteredpropertycache
    def _encodefilterpats(self):
        return self._loadfilter('encode')

    @unfilteredpropertycache
    def _decodefilterpats(self):
        return self._loadfilter('decode')

    def adddatafilter(self, name, filter):
        self._datafilters[name] = filter

    def wread(self, filename):
        if self._link(filename):
            data = self.wvfs.readlink(filename)
        else:
            data = self.wopener.read(filename)
        return self._filter(self._encodefilterpats, filename, data)

    def wwrite(self, filename, data, flags):
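        # Apply the decode filters and write the result into the working
        # directory, honouring the symlink ('l') and exec ('x') flags.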
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener.write(filename, data)
            if 'x' in flags:
                self.wvfs.setflags(filename, False, True)

    def wwritedata(self, filename, data):
        return self._filter(self._decodefilterpats, filename, data)

    def transaction(self, desc, report=None):
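        # Return a new transaction, nesting it inside any transaction already
        # running; refuse to start if an abandoned journal is present.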
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if self.svfs.exists("journal"):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        def onclose():
            self.store.write(tr)

        self._writejournal(desc)
        renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
        rp = report and report or self.ui.warn
        tr = transaction.transaction(rp, self.sopener,
                                     "journal",
                                     aftertrans(renames),
                                     self.store.createmode,
                                     onclose)
        self._transref = weakref.ref(tr)
        return tr

    def _journalfiles(self):
        return ((self.svfs, 'journal'),
                (self.vfs, 'journal.dirstate'),
                (self.vfs, 'journal.branch'),
                (self.vfs, 'journal.desc'),
                (self.vfs, 'journal.bookmarks'),
                (self.svfs, 'journal.phaseroots'))

    def undofiles(self):
        return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]

    def _writejournal(self, desc):
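        # Snapshot dirstate, branch, description, bookmarks and phase roots so
        # that an interrupted or rolled-back transaction can restore them.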
        self.opener.write("journal.dirstate",
                          self.opener.tryread("dirstate"))
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))
        self.opener.write("journal.bookmarks",
                          self.opener.tryread("bookmarks"))
        self.sopener.write("journal.phaseroots",
                           self.sopener.tryread("phaseroots"))

    def recover(self):
        lock = self.lock()
        try:
            if self.svfs.exists("journal"):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, "journal",
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()

    def rollback(self, dryrun=False, force=False):
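        # Public entry point for rollback: take both locks and delegate to
        # _rollback() when undo files exist.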
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if self.svfs.exists("undo"):
                return self._rollback(dryrun, force)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)

    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        ui = self.ui
        try:
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        self.destroying()
        transaction.rollback(self.sopener, 'undo', ui.warn)
        if self.vfs.exists('undo.bookmarks'):
            self.vfs.rename('undo.bookmarks', 'bookmarks')
        if self.svfs.exists('undo.phaseroots'):
            self.svfs.rename('undo.phaseroots', 'phaseroots')
        self.invalidate()

        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            self.vfs.rename('undo.dirstate', 'dirstate')
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0

    def invalidatecaches(self):

        if '_tagscache' in vars(self):
            # can't use delattr on proxy
            del self.__dict__['_tagscache']

        self.unfiltered()._branchcaches.clear()
        self.invalidatevolatilesets()

    def invalidatevolatilesets(self):
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)

    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different from dirstate.invalidate() in that it doesn't always
        reread the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        if hasunfilteredcache(self, 'dirstate'):
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            delattr(self.unfiltered(), 'dirstate')

    def invalidate(self):
        unfiltered = self.unfiltered() # all file caches are stored unfiltered
        for k in self._filecache:
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue

            try:
                delattr(unfiltered, k)
            except AttributeError:
                pass
        self.invalidatecaches()
        self.store.invalidatecaches()

    def invalidateall(self):
        '''Fully invalidates both store and non-store parts, causing the
        subsequent operation to reread any outside changes.'''
        # extensions should hook this to invalidate their caches
        self.invalidate()
        self.invalidatedirstate()

    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
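        # Try to take the lock without blocking first; if it is already held
        # and 'wait' is set, retry with the configured ui.timeout, warning
        # about the current holder.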
1016 try:
1025 try:
1017 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1026 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1018 except error.LockHeld, inst:
1027 except error.LockHeld, inst:
1019 if not wait:
1028 if not wait:
1020 raise
1029 raise
1021 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1030 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1022 (desc, inst.locker))
1031 (desc, inst.locker))
1023 # default to 600 seconds timeout
1032 # default to 600 seconds timeout
1024 l = lockmod.lock(vfs, lockname,
1033 l = lockmod.lock(vfs, lockname,
1025 int(self.ui.config("ui", "timeout", "600")),
1034 int(self.ui.config("ui", "timeout", "600")),
1026 releasefn, desc=desc)
1035 releasefn, desc=desc)
1027 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1036 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1028 if acquirefn:
1037 if acquirefn:
1029 acquirefn()
1038 acquirefn()
1030 return l
1039 return l
1031
1040
1032 def _afterlock(self, callback):
1041 def _afterlock(self, callback):
1033 """add a callback to the current repository lock.
1042 """add a callback to the current repository lock.
1034
1043
1035 The callback will be executed on lock release."""
1044 The callback will be executed on lock release."""
1036 l = self._lockref and self._lockref()
1045 l = self._lockref and self._lockref()
1037 if l:
1046 if l:
1038 l.postrelease.append(callback)
1047 l.postrelease.append(callback)
1039 else:
1048 else:
1040 callback()
1049 callback()
1041
1050
1042 def lock(self, wait=True):
1051 def lock(self, wait=True):
1043 '''Lock the repository store (.hg/store) and return a weak reference
1052 '''Lock the repository store (.hg/store) and return a weak reference
1044 to the lock. Use this before modifying the store (e.g. committing or
1053 to the lock. Use this before modifying the store (e.g. committing or
1045 stripping). If you are opening a transaction, get a lock as well.)'''
1054 stripping). If you are opening a transaction, get a lock as well.)'''
1046 l = self._lockref and self._lockref()
1055 l = self._lockref and self._lockref()
1047 if l is not None and l.held:
1056 if l is not None and l.held:
1048 l.lock()
1057 l.lock()
1049 return l
1058 return l
1050
1059
1051 def unlock():
1060 def unlock():
1052 if hasunfilteredcache(self, '_phasecache'):
1061 if hasunfilteredcache(self, '_phasecache'):
1053 self._phasecache.write()
1062 self._phasecache.write()
1054 for k, ce in self._filecache.items():
1063 for k, ce in self._filecache.items():
1055 if k == 'dirstate' or k not in self.__dict__:
1064 if k == 'dirstate' or k not in self.__dict__:
1056 continue
1065 continue
1057 ce.refresh()
1066 ce.refresh()
1058
1067
1059 l = self._lock(self.svfs, "lock", wait, unlock,
1068 l = self._lock(self.svfs, "lock", wait, unlock,
1060 self.invalidate, _('repository %s') % self.origroot)
1069 self.invalidate, _('repository %s') % self.origroot)
1061 self._lockref = weakref.ref(l)
1070 self._lockref = weakref.ref(l)
1062 return l
1071 return l
1063
1072
1064 def wlock(self, wait=True):
1073 def wlock(self, wait=True):
1065 '''Lock the non-store parts of the repository (everything under
1074 '''Lock the non-store parts of the repository (everything under
1066 .hg except .hg/store) and return a weak reference to the lock.
1075 .hg except .hg/store) and return a weak reference to the lock.
1067 Use this before modifying files in .hg.'''
1076 Use this before modifying files in .hg.'''
1068 l = self._wlockref and self._wlockref()
1077 l = self._wlockref and self._wlockref()
1069 if l is not None and l.held:
1078 if l is not None and l.held:
1070 l.lock()
1079 l.lock()
1071 return l
1080 return l
1072
1081
1073 def unlock():
1082 def unlock():
1074 self.dirstate.write()
1083 self.dirstate.write()
1075 self._filecache['dirstate'].refresh()
1084 self._filecache['dirstate'].refresh()
1076
1085
1077 l = self._lock(self.vfs, "wlock", wait, unlock,
1086 l = self._lock(self.vfs, "wlock", wait, unlock,
1078 self.invalidatedirstate, _('working directory of %s') %
1087 self.invalidatedirstate, _('working directory of %s') %
1079 self.origroot)
1088 self.origroot)
1080 self._wlockref = weakref.ref(l)
1089 self._wlockref = weakref.ref(l)
1081 return l
1090 return l
1082
1091
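The two docstrings above spell out which lock guards which part of the repository; the sketch below shows the usual acquire-in-order, release-in-reverse pattern. It is an illustration only: `repo` is assumed to be a localrepository instance, and `locked_update` plus its two callables are hypothetical names, not part of this module.

# Illustration only: the acquire/release ordering implied by the
# lock()/wlock() docstrings above.  `repo` is assumed to be a
# localrepository; the two callables are hypothetical placeholders.
def locked_update(repo, update_working_copy, update_store):
    wlock = repo.wlock()        # protects .hg/ outside the store (dirstate, ...)
    try:
        lock = repo.lock()      # protects .hg/store
        try:
            update_store(repo)
            update_working_copy(repo)
        finally:
            lock.release()
    finally:
        wlock.release()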
1083 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1092 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1084 """
1093 """
1085 commit an individual file as part of a larger transaction
1094 commit an individual file as part of a larger transaction
1086 """
1095 """
1087
1096
1088 fname = fctx.path()
1097 fname = fctx.path()
1089 text = fctx.data()
1098 text = fctx.data()
1090 flog = self.file(fname)
1099 flog = self.file(fname)
1091 fparent1 = manifest1.get(fname, nullid)
1100 fparent1 = manifest1.get(fname, nullid)
1092 fparent2 = fparent2o = manifest2.get(fname, nullid)
1101 fparent2 = fparent2o = manifest2.get(fname, nullid)
1093
1102
1094 meta = {}
1103 meta = {}
1095 copy = fctx.renamed()
1104 copy = fctx.renamed()
1096 if copy and copy[0] != fname:
1105 if copy and copy[0] != fname:
1097 # Mark the new revision of this file as a copy of another
1106 # Mark the new revision of this file as a copy of another
1098 # file. This copy data will effectively act as a parent
1107 # file. This copy data will effectively act as a parent
1099 # of this new revision. If this is a merge, the first
1108 # of this new revision. If this is a merge, the first
1100 # parent will be the nullid (meaning "look up the copy data")
1109 # parent will be the nullid (meaning "look up the copy data")
1101 # and the second one will be the other parent. For example:
1110 # and the second one will be the other parent. For example:
1102 #
1111 #
1103 # 0 --- 1 --- 3 rev1 changes file foo
1112 # 0 --- 1 --- 3 rev1 changes file foo
1104 # \ / rev2 renames foo to bar and changes it
1113 # \ / rev2 renames foo to bar and changes it
1105 # \- 2 -/ rev3 should have bar with all changes and
1114 # \- 2 -/ rev3 should have bar with all changes and
1106 # should record that bar descends from
1115 # should record that bar descends from
1107 # bar in rev2 and foo in rev1
1116 # bar in rev2 and foo in rev1
1108 #
1117 #
1109 # this allows this merge to succeed:
1118 # this allows this merge to succeed:
1110 #
1119 #
1111 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1120 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1112 # \ / merging rev3 and rev4 should use bar@rev2
1121 # \ / merging rev3 and rev4 should use bar@rev2
1113 # \- 2 --- 4 as the merge base
1122 # \- 2 --- 4 as the merge base
1114 #
1123 #
1115
1124
1116 cfname = copy[0]
1125 cfname = copy[0]
1117 crev = manifest1.get(cfname)
1126 crev = manifest1.get(cfname)
1118 newfparent = fparent2
1127 newfparent = fparent2
1119
1128
1120 if manifest2: # branch merge
1129 if manifest2: # branch merge
1121 if fparent2 == nullid or crev is None: # copied on remote side
1130 if fparent2 == nullid or crev is None: # copied on remote side
1122 if cfname in manifest2:
1131 if cfname in manifest2:
1123 crev = manifest2[cfname]
1132 crev = manifest2[cfname]
1124 newfparent = fparent1
1133 newfparent = fparent1
1125
1134
1126 # find source in nearest ancestor if we've lost track
1135 # find source in nearest ancestor if we've lost track
1127 if not crev:
1136 if not crev:
1128 self.ui.debug(" %s: searching for copy revision for %s\n" %
1137 self.ui.debug(" %s: searching for copy revision for %s\n" %
1129 (fname, cfname))
1138 (fname, cfname))
1130 for ancestor in self[None].ancestors():
1139 for ancestor in self[None].ancestors():
1131 if cfname in ancestor:
1140 if cfname in ancestor:
1132 crev = ancestor[cfname].filenode()
1141 crev = ancestor[cfname].filenode()
1133 break
1142 break
1134
1143
1135 if crev:
1144 if crev:
1136 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1145 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1137 meta["copy"] = cfname
1146 meta["copy"] = cfname
1138 meta["copyrev"] = hex(crev)
1147 meta["copyrev"] = hex(crev)
1139 fparent1, fparent2 = nullid, newfparent
1148 fparent1, fparent2 = nullid, newfparent
1140 else:
1149 else:
1141 self.ui.warn(_("warning: can't find ancestor for '%s' "
1150 self.ui.warn(_("warning: can't find ancestor for '%s' "
1142 "copied from '%s'!\n") % (fname, cfname))
1151 "copied from '%s'!\n") % (fname, cfname))
1143
1152
1144 elif fparent1 == nullid:
1153 elif fparent1 == nullid:
1145 fparent1, fparent2 = fparent2, nullid
1154 fparent1, fparent2 = fparent2, nullid
1146 elif fparent2 != nullid:
1155 elif fparent2 != nullid:
1147 # is one parent an ancestor of the other?
1156 # is one parent an ancestor of the other?
1148 fparentancestor = flog.ancestor(fparent1, fparent2)
1157 fparentancestor = flog.ancestor(fparent1, fparent2)
1149 if fparentancestor == fparent1:
1158 if fparentancestor == fparent1:
1150 fparent1, fparent2 = fparent2, nullid
1159 fparent1, fparent2 = fparent2, nullid
1151 elif fparentancestor == fparent2:
1160 elif fparentancestor == fparent2:
1152 fparent2 = nullid
1161 fparent2 = nullid
1153
1162
1154 # is the file changed?
1163 # is the file changed?
1155 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1164 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1156 changelist.append(fname)
1165 changelist.append(fname)
1157 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1166 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1158
1167
1159 # are just the flags changed during merge?
1168 # are just the flags changed during merge?
1160 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1169 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1161 changelist.append(fname)
1170 changelist.append(fname)
1162
1171
1163 return fparent1
1172 return fparent1
1164
1173
1165 @unfilteredmethod
1174 @unfilteredmethod
1166 def commit(self, text="", user=None, date=None, match=None, force=False,
1175 def commit(self, text="", user=None, date=None, match=None, force=False,
1167 editor=False, extra={}):
1176 editor=False, extra={}):
1168 """Add a new revision to current repository.
1177 """Add a new revision to current repository.
1169
1178
1170 Revision information is gathered from the working directory;
1179 Revision information is gathered from the working directory;
1171 match can be used to filter the committed files. If editor is
1180 match can be used to filter the committed files. If editor is
1172 supplied, it is called to get a commit message.
1181 supplied, it is called to get a commit message.
1173 """
1182 """
1174
1183
1175 def fail(f, msg):
1184 def fail(f, msg):
1176 raise util.Abort('%s: %s' % (f, msg))
1185 raise util.Abort('%s: %s' % (f, msg))
1177
1186
1178 if not match:
1187 if not match:
1179 match = matchmod.always(self.root, '')
1188 match = matchmod.always(self.root, '')
1180
1189
1181 if not force:
1190 if not force:
1182 vdirs = []
1191 vdirs = []
1183 match.explicitdir = vdirs.append
1192 match.explicitdir = vdirs.append
1184 match.bad = fail
1193 match.bad = fail
1185
1194
1186 wlock = self.wlock()
1195 wlock = self.wlock()
1187 try:
1196 try:
1188 wctx = self[None]
1197 wctx = self[None]
1189 merge = len(wctx.parents()) > 1
1198 merge = len(wctx.parents()) > 1
1190
1199
1191 if (not force and merge and match and
1200 if (not force and merge and match and
1192 (match.files() or match.anypats())):
1201 (match.files() or match.anypats())):
1193 raise util.Abort(_('cannot partially commit a merge '
1202 raise util.Abort(_('cannot partially commit a merge '
1194 '(do not specify files or patterns)'))
1203 '(do not specify files or patterns)'))
1195
1204
1196 changes = self.status(match=match, clean=force)
1205 changes = self.status(match=match, clean=force)
1197 if force:
1206 if force:
1198 changes[0].extend(changes[6]) # mq may commit unchanged files
1207 changes[0].extend(changes[6]) # mq may commit unchanged files
1199
1208
1200 # check subrepos
1209 # check subrepos
1201 subs = []
1210 subs = []
1202 commitsubs = set()
1211 commitsubs = set()
1203 newstate = wctx.substate.copy()
1212 newstate = wctx.substate.copy()
1204 # only manage subrepos and .hgsubstate if .hgsub is present
1213 # only manage subrepos and .hgsubstate if .hgsub is present
1205 if '.hgsub' in wctx:
1214 if '.hgsub' in wctx:
1206 # we'll decide whether to track this ourselves, thanks
1215 # we'll decide whether to track this ourselves, thanks
1207 for c in changes[:3]:
1216 for c in changes[:3]:
1208 if '.hgsubstate' in c:
1217 if '.hgsubstate' in c:
1209 c.remove('.hgsubstate')
1218 c.remove('.hgsubstate')
1210
1219
1211 # compare current state to last committed state
1220 # compare current state to last committed state
1212 # build new substate based on last committed state
1221 # build new substate based on last committed state
1213 oldstate = wctx.p1().substate
1222 oldstate = wctx.p1().substate
1214 for s in sorted(newstate.keys()):
1223 for s in sorted(newstate.keys()):
1215 if not match(s):
1224 if not match(s):
1216 # ignore working copy, use old state if present
1225 # ignore working copy, use old state if present
1217 if s in oldstate:
1226 if s in oldstate:
1218 newstate[s] = oldstate[s]
1227 newstate[s] = oldstate[s]
1219 continue
1228 continue
1220 if not force:
1229 if not force:
1221 raise util.Abort(
1230 raise util.Abort(
1222 _("commit with new subrepo %s excluded") % s)
1231 _("commit with new subrepo %s excluded") % s)
1223 if wctx.sub(s).dirty(True):
1232 if wctx.sub(s).dirty(True):
1224 if not self.ui.configbool('ui', 'commitsubrepos'):
1233 if not self.ui.configbool('ui', 'commitsubrepos'):
1225 raise util.Abort(
1234 raise util.Abort(
1226 _("uncommitted changes in subrepo %s") % s,
1235 _("uncommitted changes in subrepo %s") % s,
1227 hint=_("use --subrepos for recursive commit"))
1236 hint=_("use --subrepos for recursive commit"))
1228 subs.append(s)
1237 subs.append(s)
1229 commitsubs.add(s)
1238 commitsubs.add(s)
1230 else:
1239 else:
1231 bs = wctx.sub(s).basestate()
1240 bs = wctx.sub(s).basestate()
1232 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1241 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1233 if oldstate.get(s, (None, None, None))[1] != bs:
1242 if oldstate.get(s, (None, None, None))[1] != bs:
1234 subs.append(s)
1243 subs.append(s)
1235
1244
1236 # check for removed subrepos
1245 # check for removed subrepos
1237 for p in wctx.parents():
1246 for p in wctx.parents():
1238 r = [s for s in p.substate if s not in newstate]
1247 r = [s for s in p.substate if s not in newstate]
1239 subs += [s for s in r if match(s)]
1248 subs += [s for s in r if match(s)]
1240 if subs:
1249 if subs:
1241 if (not match('.hgsub') and
1250 if (not match('.hgsub') and
1242 '.hgsub' in (wctx.modified() + wctx.added())):
1251 '.hgsub' in (wctx.modified() + wctx.added())):
1243 raise util.Abort(
1252 raise util.Abort(
1244 _("can't commit subrepos without .hgsub"))
1253 _("can't commit subrepos without .hgsub"))
1245 changes[0].insert(0, '.hgsubstate')
1254 changes[0].insert(0, '.hgsubstate')
1246
1255
1247 elif '.hgsub' in changes[2]:
1256 elif '.hgsub' in changes[2]:
1248 # clean up .hgsubstate when .hgsub is removed
1257 # clean up .hgsubstate when .hgsub is removed
1249 if ('.hgsubstate' in wctx and
1258 if ('.hgsubstate' in wctx and
1250 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1259 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1251 changes[2].insert(0, '.hgsubstate')
1260 changes[2].insert(0, '.hgsubstate')
1252
1261
1253 # make sure all explicit patterns are matched
1262 # make sure all explicit patterns are matched
1254 if not force and match.files():
1263 if not force and match.files():
1255 matched = set(changes[0] + changes[1] + changes[2])
1264 matched = set(changes[0] + changes[1] + changes[2])
1256
1265
1257 for f in match.files():
1266 for f in match.files():
1258 f = self.dirstate.normalize(f)
1267 f = self.dirstate.normalize(f)
1259 if f == '.' or f in matched or f in wctx.substate:
1268 if f == '.' or f in matched or f in wctx.substate:
1260 continue
1269 continue
1261 if f in changes[3]: # missing
1270 if f in changes[3]: # missing
1262 fail(f, _('file not found!'))
1271 fail(f, _('file not found!'))
1263 if f in vdirs: # visited directory
1272 if f in vdirs: # visited directory
1264 d = f + '/'
1273 d = f + '/'
1265 for mf in matched:
1274 for mf in matched:
1266 if mf.startswith(d):
1275 if mf.startswith(d):
1267 break
1276 break
1268 else:
1277 else:
1269 fail(f, _("no match under directory!"))
1278 fail(f, _("no match under directory!"))
1270 elif f not in self.dirstate:
1279 elif f not in self.dirstate:
1271 fail(f, _("file not tracked!"))
1280 fail(f, _("file not tracked!"))
1272
1281
1273 cctx = context.workingctx(self, text, user, date, extra, changes)
1282 cctx = context.workingctx(self, text, user, date, extra, changes)
1274
1283
1275 if (not force and not extra.get("close") and not merge
1284 if (not force and not extra.get("close") and not merge
1276 and not cctx.files()
1285 and not cctx.files()
1277 and wctx.branch() == wctx.p1().branch()):
1286 and wctx.branch() == wctx.p1().branch()):
1278 return None
1287 return None
1279
1288
1280 if merge and cctx.deleted():
1289 if merge and cctx.deleted():
1281 raise util.Abort(_("cannot commit merge with missing files"))
1290 raise util.Abort(_("cannot commit merge with missing files"))
1282
1291
1283 ms = mergemod.mergestate(self)
1292 ms = mergemod.mergestate(self)
1284 for f in changes[0]:
1293 for f in changes[0]:
1285 if f in ms and ms[f] == 'u':
1294 if f in ms and ms[f] == 'u':
1286 raise util.Abort(_("unresolved merge conflicts "
1295 raise util.Abort(_("unresolved merge conflicts "
1287 "(see hg help resolve)"))
1296 "(see hg help resolve)"))
1288
1297
1289 if editor:
1298 if editor:
1290 cctx._text = editor(self, cctx, subs)
1299 cctx._text = editor(self, cctx, subs)
1291 edited = (text != cctx._text)
1300 edited = (text != cctx._text)
1292
1301
1293 # Save commit message in case this transaction gets rolled back
1302 # Save commit message in case this transaction gets rolled back
1294 # (e.g. by a pretxncommit hook). Leave the content alone on
1303 # (e.g. by a pretxncommit hook). Leave the content alone on
1295 # the assumption that the user will use the same editor again.
1304 # the assumption that the user will use the same editor again.
1296 msgfn = self.savecommitmessage(cctx._text)
1305 msgfn = self.savecommitmessage(cctx._text)
1297
1306
1298 # commit subs and write new state
1307 # commit subs and write new state
1299 if subs:
1308 if subs:
1300 for s in sorted(commitsubs):
1309 for s in sorted(commitsubs):
1301 sub = wctx.sub(s)
1310 sub = wctx.sub(s)
1302 self.ui.status(_('committing subrepository %s\n') %
1311 self.ui.status(_('committing subrepository %s\n') %
1303 subrepo.subrelpath(sub))
1312 subrepo.subrelpath(sub))
1304 sr = sub.commit(cctx._text, user, date)
1313 sr = sub.commit(cctx._text, user, date)
1305 newstate[s] = (newstate[s][0], sr)
1314 newstate[s] = (newstate[s][0], sr)
1306 subrepo.writestate(self, newstate)
1315 subrepo.writestate(self, newstate)
1307
1316
1308 p1, p2 = self.dirstate.parents()
1317 p1, p2 = self.dirstate.parents()
1309 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1318 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1310 try:
1319 try:
1311 self.hook("precommit", throw=True, parent1=hookp1,
1320 self.hook("precommit", throw=True, parent1=hookp1,
1312 parent2=hookp2)
1321 parent2=hookp2)
1313 ret = self.commitctx(cctx, True)
1322 ret = self.commitctx(cctx, True)
1314 except: # re-raises
1323 except: # re-raises
1315 if edited:
1324 if edited:
1316 self.ui.write(
1325 self.ui.write(
1317 _('note: commit message saved in %s\n') % msgfn)
1326 _('note: commit message saved in %s\n') % msgfn)
1318 raise
1327 raise
1319
1328
1320 # update bookmarks, dirstate and mergestate
1329 # update bookmarks, dirstate and mergestate
1321 bookmarks.update(self, [p1, p2], ret)
1330 bookmarks.update(self, [p1, p2], ret)
1322 cctx.markcommitted(ret)
1331 cctx.markcommitted(ret)
1323 ms.reset()
1332 ms.reset()
1324 finally:
1333 finally:
1325 wlock.release()
1334 wlock.release()
1326
1335
1327 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1336 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1328 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1337 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1329 self._afterlock(commithook)
1338 self._afterlock(commithook)
1330 return ret
1339 return ret
1331
1340
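A hypothetical caller of commit() above might look like the sketch below; `commit_all` is not part of this module, and the only assumption is a localrepository instance `repo`.

# Hypothetical caller sketch for commit() above: commit every outstanding
# change in `repo` (match=None) and report the new changeset, if any.
from mercurial.node import short

def commit_all(repo, message, user=None):
    node = repo.commit(text=message, user=user)
    if node is None:            # nothing changed, commit() returned None
        repo.ui.status("nothing to commit\n")
        return None
    repo.ui.status("created changeset %s\n" % short(node))
    return node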
1332 @unfilteredmethod
1341 @unfilteredmethod
1333 def commitctx(self, ctx, error=False):
1342 def commitctx(self, ctx, error=False):
1334 """Add a new revision to current repository.
1343 """Add a new revision to current repository.
1335 Revision information is passed via the context argument.
1344 Revision information is passed via the context argument.
1336 """
1345 """
1337
1346
1338 tr = lock = None
1347 tr = lock = None
1339 removed = list(ctx.removed())
1348 removed = list(ctx.removed())
1340 p1, p2 = ctx.p1(), ctx.p2()
1349 p1, p2 = ctx.p1(), ctx.p2()
1341 user = ctx.user()
1350 user = ctx.user()
1342
1351
1343 lock = self.lock()
1352 lock = self.lock()
1344 try:
1353 try:
1345 tr = self.transaction("commit")
1354 tr = self.transaction("commit")
1346 trp = weakref.proxy(tr)
1355 trp = weakref.proxy(tr)
1347
1356
1348 if ctx.files():
1357 if ctx.files():
1349 m1 = p1.manifest().copy()
1358 m1 = p1.manifest().copy()
1350 m2 = p2.manifest()
1359 m2 = p2.manifest()
1351
1360
1352 # check in files
1361 # check in files
1353 new = {}
1362 new = {}
1354 changed = []
1363 changed = []
1355 linkrev = len(self)
1364 linkrev = len(self)
1356 for f in sorted(ctx.modified() + ctx.added()):
1365 for f in sorted(ctx.modified() + ctx.added()):
1357 self.ui.note(f + "\n")
1366 self.ui.note(f + "\n")
1358 try:
1367 try:
1359 fctx = ctx[f]
1368 fctx = ctx[f]
1360 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1369 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1361 changed)
1370 changed)
1362 m1.set(f, fctx.flags())
1371 m1.set(f, fctx.flags())
1363 except OSError, inst:
1372 except OSError, inst:
1364 self.ui.warn(_("trouble committing %s!\n") % f)
1373 self.ui.warn(_("trouble committing %s!\n") % f)
1365 raise
1374 raise
1366 except IOError, inst:
1375 except IOError, inst:
1367 errcode = getattr(inst, 'errno', errno.ENOENT)
1376 errcode = getattr(inst, 'errno', errno.ENOENT)
1368 if error or errcode and errcode != errno.ENOENT:
1377 if error or errcode and errcode != errno.ENOENT:
1369 self.ui.warn(_("trouble committing %s!\n") % f)
1378 self.ui.warn(_("trouble committing %s!\n") % f)
1370 raise
1379 raise
1371 else:
1380 else:
1372 removed.append(f)
1381 removed.append(f)
1373
1382
1374 # update manifest
1383 # update manifest
1375 m1.update(new)
1384 m1.update(new)
1376 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1385 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1377 drop = [f for f in removed if f in m1]
1386 drop = [f for f in removed if f in m1]
1378 for f in drop:
1387 for f in drop:
1379 del m1[f]
1388 del m1[f]
1380 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1389 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1381 p2.manifestnode(), (new, drop))
1390 p2.manifestnode(), (new, drop))
1382 files = changed + removed
1391 files = changed + removed
1383 else:
1392 else:
1384 mn = p1.manifestnode()
1393 mn = p1.manifestnode()
1385 files = []
1394 files = []
1386
1395
1387 # update changelog
1396 # update changelog
1388 self.changelog.delayupdate()
1397 self.changelog.delayupdate()
1389 n = self.changelog.add(mn, files, ctx.description(),
1398 n = self.changelog.add(mn, files, ctx.description(),
1390 trp, p1.node(), p2.node(),
1399 trp, p1.node(), p2.node(),
1391 user, ctx.date(), ctx.extra().copy())
1400 user, ctx.date(), ctx.extra().copy())
1392 p = lambda: self.changelog.writepending() and self.root or ""
1401 p = lambda: self.changelog.writepending() and self.root or ""
1393 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1402 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1394 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1403 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1395 parent2=xp2, pending=p)
1404 parent2=xp2, pending=p)
1396 self.changelog.finalize(trp)
1405 self.changelog.finalize(trp)
1397 # set the new commit in the proper phase
1406 # set the new commit in the proper phase
1398 targetphase = subrepo.newcommitphase(self.ui, ctx)
1407 targetphase = subrepo.newcommitphase(self.ui, ctx)
1399 if targetphase:
1408 if targetphase:
1400 # retracting the boundary does not alter the parent changeset.
1409 # retracting the boundary does not alter the parent changeset.
1401 # if a parent has a higher phase, the resulting phase will
1410 # if a parent has a higher phase, the resulting phase will
1402 # be compliant anyway
1411 # be compliant anyway
1403 #
1412 #
1404 # if the minimal phase was 0, we don't need to retract anything
1413 # if the minimal phase was 0, we don't need to retract anything
1405 phases.retractboundary(self, targetphase, [n])
1414 phases.retractboundary(self, targetphase, [n])
1406 tr.close()
1415 tr.close()
1407 branchmap.updatecache(self.filtered('served'))
1416 branchmap.updatecache(self.filtered('served'))
1408 return n
1417 return n
1409 finally:
1418 finally:
1410 if tr:
1419 if tr:
1411 tr.release()
1420 tr.release()
1412 lock.release()
1421 lock.release()
1413
1422
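commitctx() above follows the standard lock-plus-transaction discipline: close() the transaction on success, release() it unconditionally so a failed run is rolled back. A stripped-down sketch of that pattern, with hypothetical helper and callback names:

# Sketch of the lock + transaction discipline used by commitctx() above.
# `repo` is assumed to be a localrepository; `work` is any callable that
# performs store writes under the open transaction.
def with_store_transaction(repo, desc, work):
    lock = repo.lock()
    try:
        tr = repo.transaction(desc)
        try:
            result = work(tr)
            tr.close()          # commit the journal
            return result
        finally:
            tr.release()        # rolls back if close() was never reached
    finally:
        lock.release()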
1414 @unfilteredmethod
1423 @unfilteredmethod
1415 def destroying(self):
1424 def destroying(self):
1416 '''Inform the repository that nodes are about to be destroyed.
1425 '''Inform the repository that nodes are about to be destroyed.
1417 Intended for use by strip and rollback, so there's a common
1426 Intended for use by strip and rollback, so there's a common
1418 place for anything that has to be done before destroying history.
1427 place for anything that has to be done before destroying history.
1419
1428
1420 This is mostly useful for saving state that is in memory and waiting
1429 This is mostly useful for saving state that is in memory and waiting
1421 to be flushed when the current lock is released. Because a call to
1430 to be flushed when the current lock is released. Because a call to
1422 destroyed is imminent, the repo will be invalidated causing those
1431 destroyed is imminent, the repo will be invalidated causing those
1423 changes to stay in memory (waiting for the next unlock), or vanish
1432 changes to stay in memory (waiting for the next unlock), or vanish
1424 completely.
1433 completely.
1425 '''
1434 '''
1426 # When using the same lock to commit and strip, the phasecache is left
1435 # When using the same lock to commit and strip, the phasecache is left
1427 # dirty after committing. Then when we strip, the repo is invalidated,
1436 # dirty after committing. Then when we strip, the repo is invalidated,
1428 # causing those changes to disappear.
1437 # causing those changes to disappear.
1429 if '_phasecache' in vars(self):
1438 if '_phasecache' in vars(self):
1430 self._phasecache.write()
1439 self._phasecache.write()
1431
1440
1432 @unfilteredmethod
1441 @unfilteredmethod
1433 def destroyed(self):
1442 def destroyed(self):
1434 '''Inform the repository that nodes have been destroyed.
1443 '''Inform the repository that nodes have been destroyed.
1435 Intended for use by strip and rollback, so there's a common
1444 Intended for use by strip and rollback, so there's a common
1436 place for anything that has to be done after destroying history.
1445 place for anything that has to be done after destroying history.
1437 '''
1446 '''
1438 # When one tries to:
1447 # When one tries to:
1439 # 1) destroy nodes thus calling this method (e.g. strip)
1448 # 1) destroy nodes thus calling this method (e.g. strip)
1440 # 2) use phasecache somewhere (e.g. commit)
1449 # 2) use phasecache somewhere (e.g. commit)
1441 #
1450 #
1442 # then 2) will fail because the phasecache contains nodes that were
1451 # then 2) will fail because the phasecache contains nodes that were
1443 # removed. We can either remove phasecache from the filecache,
1452 # removed. We can either remove phasecache from the filecache,
1444 # causing it to reload next time it is accessed, or simply filter
1453 # causing it to reload next time it is accessed, or simply filter
1445 # the removed nodes now and write the updated cache.
1454 # the removed nodes now and write the updated cache.
1446 self._phasecache.filterunknown(self)
1455 self._phasecache.filterunknown(self)
1447 self._phasecache.write()
1456 self._phasecache.write()
1448
1457
1449 # update the 'served' branch cache to help read-only server processes
1458 # update the 'served' branch cache to help read-only server processes
1450 # Thanks to branchcache collaboration, this is done from the nearest
1459 # Thanks to branchcache collaboration, this is done from the nearest
1451 # filtered subset and it is expected to be fast.
1460 # filtered subset and it is expected to be fast.
1452 branchmap.updatecache(self.filtered('served'))
1461 branchmap.updatecache(self.filtered('served'))
1453
1462
1454 # Ensure the persistent tag cache is updated. Doing it now
1463 # Ensure the persistent tag cache is updated. Doing it now
1455 # means that the tag cache only has to worry about destroyed
1464 # means that the tag cache only has to worry about destroyed
1456 # heads immediately after a strip/rollback. That in turn
1465 # heads immediately after a strip/rollback. That in turn
1457 # guarantees that "cachetip == currenttip" (comparing both rev
1466 # guarantees that "cachetip == currenttip" (comparing both rev
1458 # and node) always means no nodes have been added or destroyed.
1467 # and node) always means no nodes have been added or destroyed.
1459
1468
1460 # XXX this is suboptimal when qrefresh'ing: we strip the current
1469 # XXX this is suboptimal when qrefresh'ing: we strip the current
1461 # head, refresh the tag cache, then immediately add a new head.
1470 # head, refresh the tag cache, then immediately add a new head.
1462 # But I think doing it this way is necessary for the "instant
1471 # But I think doing it this way is necessary for the "instant
1463 # tag cache retrieval" case to work.
1472 # tag cache retrieval" case to work.
1464 self.invalidate()
1473 self.invalidate()
1465
1474
1466 def walk(self, match, node=None):
1475 def walk(self, match, node=None):
1467 '''
1476 '''
1468 walk recursively through the directory tree or a given
1477 walk recursively through the directory tree or a given
1469 changeset, finding all files matched by the match
1478 changeset, finding all files matched by the match
1470 function
1479 function
1471 '''
1480 '''
1472 return self[node].walk(match)
1481 return self[node].walk(match)
1473
1482
1474 def status(self, node1='.', node2=None, match=None,
1483 def status(self, node1='.', node2=None, match=None,
1475 ignored=False, clean=False, unknown=False,
1484 ignored=False, clean=False, unknown=False,
1476 listsubrepos=False):
1485 listsubrepos=False):
1477 """return status of files between two nodes or node and working
1486 """return status of files between two nodes or node and working
1478 directory.
1487 directory.
1479
1488
1480 If node1 is None, use the first dirstate parent instead.
1489 If node1 is None, use the first dirstate parent instead.
1481 If node2 is None, compare node1 with working directory.
1490 If node2 is None, compare node1 with working directory.
1482 """
1491 """
1483
1492
1484 def mfmatches(ctx):
1493 def mfmatches(ctx):
1485 mf = ctx.manifest().copy()
1494 mf = ctx.manifest().copy()
1486 if match.always():
1495 if match.always():
1487 return mf
1496 return mf
1488 for fn in mf.keys():
1497 for fn in mf.keys():
1489 if not match(fn):
1498 if not match(fn):
1490 del mf[fn]
1499 del mf[fn]
1491 return mf
1500 return mf
1492
1501
1493 ctx1 = self[node1]
1502 ctx1 = self[node1]
1494 ctx2 = self[node2]
1503 ctx2 = self[node2]
1495
1504
1496 working = ctx2.rev() is None
1505 working = ctx2.rev() is None
1497 parentworking = working and ctx1 == self['.']
1506 parentworking = working and ctx1 == self['.']
1498 match = match or matchmod.always(self.root, self.getcwd())
1507 match = match or matchmod.always(self.root, self.getcwd())
1499 listignored, listclean, listunknown = ignored, clean, unknown
1508 listignored, listclean, listunknown = ignored, clean, unknown
1500
1509
1501 # load earliest manifest first for caching reasons
1510 # load earliest manifest first for caching reasons
1502 if not working and ctx2.rev() < ctx1.rev():
1511 if not working and ctx2.rev() < ctx1.rev():
1503 ctx2.manifest()
1512 ctx2.manifest()
1504
1513
1505 if not parentworking:
1514 if not parentworking:
1506 def bad(f, msg):
1515 def bad(f, msg):
1507 # 'f' may be a directory pattern from 'match.files()',
1516 # 'f' may be a directory pattern from 'match.files()',
1508 # so 'f not in ctx1' is not enough
1517 # so 'f not in ctx1' is not enough
1509 if f not in ctx1 and f not in ctx1.dirs():
1518 if f not in ctx1 and f not in ctx1.dirs():
1510 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1519 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1511 match.bad = bad
1520 match.bad = bad
1512
1521
1513 if working: # we need to scan the working dir
1522 if working: # we need to scan the working dir
1514 subrepos = []
1523 subrepos = []
1515 if '.hgsub' in self.dirstate:
1524 if '.hgsub' in self.dirstate:
1516 subrepos = sorted(ctx2.substate)
1525 subrepos = sorted(ctx2.substate)
1517 s = self.dirstate.status(match, subrepos, listignored,
1526 s = self.dirstate.status(match, subrepos, listignored,
1518 listclean, listunknown)
1527 listclean, listunknown)
1519 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1528 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1520
1529
1521 # check for any possibly clean files
1530 # check for any possibly clean files
1522 if parentworking and cmp:
1531 if parentworking and cmp:
1523 fixup = []
1532 fixup = []
1524 # do a full compare of any files that might have changed
1533 # do a full compare of any files that might have changed
1525 for f in sorted(cmp):
1534 for f in sorted(cmp):
1526 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1535 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1527 or ctx1[f].cmp(ctx2[f])):
1536 or ctx1[f].cmp(ctx2[f])):
1528 modified.append(f)
1537 modified.append(f)
1529 else:
1538 else:
1530 fixup.append(f)
1539 fixup.append(f)
1531
1540
1532 # update dirstate for files that are actually clean
1541 # update dirstate for files that are actually clean
1533 if fixup:
1542 if fixup:
1534 if listclean:
1543 if listclean:
1535 clean += fixup
1544 clean += fixup
1536
1545
1537 try:
1546 try:
1538 # updating the dirstate is optional
1547 # updating the dirstate is optional
1539 # so we don't wait on the lock
1548 # so we don't wait on the lock
1540 wlock = self.wlock(False)
1549 wlock = self.wlock(False)
1541 try:
1550 try:
1542 for f in fixup:
1551 for f in fixup:
1543 self.dirstate.normal(f)
1552 self.dirstate.normal(f)
1544 finally:
1553 finally:
1545 wlock.release()
1554 wlock.release()
1546 except error.LockError:
1555 except error.LockError:
1547 pass
1556 pass
1548
1557
1549 if not parentworking:
1558 if not parentworking:
1550 mf1 = mfmatches(ctx1)
1559 mf1 = mfmatches(ctx1)
1551 if working:
1560 if working:
1552 # we are comparing working dir against non-parent
1561 # we are comparing working dir against non-parent
1553 # generate a pseudo-manifest for the working dir
1562 # generate a pseudo-manifest for the working dir
1554 mf2 = mfmatches(self['.'])
1563 mf2 = mfmatches(self['.'])
1555 for f in cmp + modified + added:
1564 for f in cmp + modified + added:
1556 mf2[f] = None
1565 mf2[f] = None
1557 mf2.set(f, ctx2.flags(f))
1566 mf2.set(f, ctx2.flags(f))
1558 for f in removed:
1567 for f in removed:
1559 if f in mf2:
1568 if f in mf2:
1560 del mf2[f]
1569 del mf2[f]
1561 else:
1570 else:
1562 # we are comparing two revisions
1571 # we are comparing two revisions
1563 deleted, unknown, ignored = [], [], []
1572 deleted, unknown, ignored = [], [], []
1564 mf2 = mfmatches(ctx2)
1573 mf2 = mfmatches(ctx2)
1565
1574
1566 modified, added, clean = [], [], []
1575 modified, added, clean = [], [], []
1567 withflags = mf1.withflags() | mf2.withflags()
1576 withflags = mf1.withflags() | mf2.withflags()
1568 for fn, mf2node in mf2.iteritems():
1577 for fn, mf2node in mf2.iteritems():
1569 if fn in mf1:
1578 if fn in mf1:
1570 if (fn not in deleted and
1579 if (fn not in deleted and
1571 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1580 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1572 (mf1[fn] != mf2node and
1581 (mf1[fn] != mf2node and
1573 (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
1582 (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
1574 modified.append(fn)
1583 modified.append(fn)
1575 elif listclean:
1584 elif listclean:
1576 clean.append(fn)
1585 clean.append(fn)
1577 del mf1[fn]
1586 del mf1[fn]
1578 elif fn not in deleted:
1587 elif fn not in deleted:
1579 added.append(fn)
1588 added.append(fn)
1580 removed = mf1.keys()
1589 removed = mf1.keys()
1581
1590
1582 if working and modified and not self.dirstate._checklink:
1591 if working and modified and not self.dirstate._checklink:
1583 # Symlink placeholders may get non-symlink-like contents
1592 # Symlink placeholders may get non-symlink-like contents
1584 # via user error or dereferencing by NFS or Samba servers,
1593 # via user error or dereferencing by NFS or Samba servers,
1585 # so we filter out any placeholders that don't look like a
1594 # so we filter out any placeholders that don't look like a
1586 # symlink
1595 # symlink
1587 sane = []
1596 sane = []
1588 for f in modified:
1597 for f in modified:
1589 if ctx2.flags(f) == 'l':
1598 if ctx2.flags(f) == 'l':
1590 d = ctx2[f].data()
1599 d = ctx2[f].data()
1591 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1600 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1592 self.ui.debug('ignoring suspect symlink placeholder'
1601 self.ui.debug('ignoring suspect symlink placeholder'
1593 ' "%s"\n' % f)
1602 ' "%s"\n' % f)
1594 continue
1603 continue
1595 sane.append(f)
1604 sane.append(f)
1596 modified = sane
1605 modified = sane
1597
1606
1598 r = modified, added, removed, deleted, unknown, ignored, clean
1607 r = modified, added, removed, deleted, unknown, ignored, clean
1599
1608
1600 if listsubrepos:
1609 if listsubrepos:
1601 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1610 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1602 if working:
1611 if working:
1603 rev2 = None
1612 rev2 = None
1604 else:
1613 else:
1605 rev2 = ctx2.substate[subpath][1]
1614 rev2 = ctx2.substate[subpath][1]
1606 try:
1615 try:
1607 submatch = matchmod.narrowmatcher(subpath, match)
1616 submatch = matchmod.narrowmatcher(subpath, match)
1608 s = sub.status(rev2, match=submatch, ignored=listignored,
1617 s = sub.status(rev2, match=submatch, ignored=listignored,
1609 clean=listclean, unknown=listunknown,
1618 clean=listclean, unknown=listunknown,
1610 listsubrepos=True)
1619 listsubrepos=True)
1611 for rfiles, sfiles in zip(r, s):
1620 for rfiles, sfiles in zip(r, s):
1612 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1621 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1613 except error.LookupError:
1622 except error.LookupError:
1614 self.ui.status(_("skipping missing subrepository: %s\n")
1623 self.ui.status(_("skipping missing subrepository: %s\n")
1615 % subpath)
1624 % subpath)
1616
1625
1617 for l in r:
1626 for l in r:
1618 l.sort()
1627 l.sort()
1619 return r
1628 return r
1620
1629
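status() returns a 7-tuple whose field order comes from the `r = modified, added, ...` assignment above. A small illustrative sketch that unpacks it (the helper name and one-letter labels are hypothetical; `repo` is assumed to be a localrepository):

# Sketch: unpack the 7-tuple built by status() above.
def print_status(repo):
    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(unknown=True, ignored=True, clean=True)
    for label, files in [('M', modified), ('A', added), ('R', removed),
                         ('!', deleted), ('?', unknown), ('I', ignored),
                         ('C', clean)]:
        for f in files:
            repo.ui.write("%s %s\n" % (label, f))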
1621 def heads(self, start=None):
1630 def heads(self, start=None):
1622 heads = self.changelog.heads(start)
1631 heads = self.changelog.heads(start)
1623 # sort the output in rev descending order
1632 # sort the output in rev descending order
1624 return sorted(heads, key=self.changelog.rev, reverse=True)
1633 return sorted(heads, key=self.changelog.rev, reverse=True)
1625
1634
1626 def branchheads(self, branch=None, start=None, closed=False):
1635 def branchheads(self, branch=None, start=None, closed=False):
1627 '''return a (possibly filtered) list of heads for the given branch
1636 '''return a (possibly filtered) list of heads for the given branch
1628
1637
1629 Heads are returned in topological order, from newest to oldest.
1638 Heads are returned in topological order, from newest to oldest.
1630 If branch is None, use the dirstate branch.
1639 If branch is None, use the dirstate branch.
1631 If start is not None, return only heads reachable from start.
1640 If start is not None, return only heads reachable from start.
1632 If closed is True, return heads that are marked as closed as well.
1641 If closed is True, return heads that are marked as closed as well.
1633 '''
1642 '''
1634 if branch is None:
1643 if branch is None:
1635 branch = self[None].branch()
1644 branch = self[None].branch()
1636 branches = self.branchmap()
1645 branches = self.branchmap()
1637 if branch not in branches:
1646 if branch not in branches:
1638 return []
1647 return []
1639 # the cache returns heads ordered lowest to highest
1648 # the cache returns heads ordered lowest to highest
1640 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1649 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1641 if start is not None:
1650 if start is not None:
1642 # filter out the heads that cannot be reached from startrev
1651 # filter out the heads that cannot be reached from startrev
1643 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1652 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1644 bheads = [h for h in bheads if h in fbheads]
1653 bheads = [h for h in bheads if h in fbheads]
1645 return bheads
1654 return bheads
1646
1655
1647 def branches(self, nodes):
1656 def branches(self, nodes):
1648 if not nodes:
1657 if not nodes:
1649 nodes = [self.changelog.tip()]
1658 nodes = [self.changelog.tip()]
1650 b = []
1659 b = []
1651 for n in nodes:
1660 for n in nodes:
1652 t = n
1661 t = n
1653 while True:
1662 while True:
1654 p = self.changelog.parents(n)
1663 p = self.changelog.parents(n)
1655 if p[1] != nullid or p[0] == nullid:
1664 if p[1] != nullid or p[0] == nullid:
1656 b.append((t, n, p[0], p[1]))
1665 b.append((t, n, p[0], p[1]))
1657 break
1666 break
1658 n = p[0]
1667 n = p[0]
1659 return b
1668 return b
1660
1669
1661 def between(self, pairs):
1670 def between(self, pairs):
1662 r = []
1671 r = []
1663
1672
1664 for top, bottom in pairs:
1673 for top, bottom in pairs:
1665 n, l, i = top, [], 0
1674 n, l, i = top, [], 0
1666 f = 1
1675 f = 1
1667
1676
1668 while n != bottom and n != nullid:
1677 while n != bottom and n != nullid:
1669 p = self.changelog.parents(n)[0]
1678 p = self.changelog.parents(n)[0]
1670 if i == f:
1679 if i == f:
1671 l.append(n)
1680 l.append(n)
1672 f = f * 2
1681 f = f * 2
1673 n = p
1682 n = p
1674 i += 1
1683 i += 1
1675
1684
1676 r.append(l)
1685 r.append(l)
1677
1686
1678 return r
1687 return r
1679
1688
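between() above samples first-parent ancestors at exponentially growing distances (1, 2, 4, 8, ... steps from `top`). The standalone sketch below reproduces just that sampling rule over a plain dict so it can run on its own; it is not the real changelog-backed code.

# Sampling rule used by between(): walking from `top` toward `bottom`,
# keep the nodes that are 1, 2, 4, 8, ... steps away.  The parent relation
# is a plain dict here (None plays the role of nullid).
def sample_between(top, bottom, first_parent):
    n, picked, i, f = top, [], 0, 1
    while n != bottom and n is not None:
        p = first_parent.get(n)
        if i == f:
            picked.append(n)
            f *= 2
        n = p
        i += 1
    return picked

# e.g. a linear chain 9 -> 8 -> ... -> 0
chain = {k: k - 1 if k else None for k in range(10)}
assert sample_between(9, 0, chain) == [8, 7, 5, 1]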
1680 def pull(self, remote, heads=None, force=False):
1689 def pull(self, remote, heads=None, force=False):
1681 return exchange.pull(self, remote, heads, force)
1690 return exchange.pull(self, remote, heads, force)
1682
1691
1683 def checkpush(self, pushop):
1692 def checkpush(self, pushop):
1684 """Extensions can override this function if additional checks have
1693 """Extensions can override this function if additional checks have
1685 to be performed before pushing, or call it if they override push
1694 to be performed before pushing, or call it if they override push
1686 command.
1695 command.
1687 """
1696 """
1688 pass
1697 pass
1689
1698
1690 def push(self, remote, force=False, revs=None, newbranch=False):
1699 def push(self, remote, force=False, revs=None, newbranch=False):
1691 return exchange.push(self, remote, force, revs, newbranch)
1700 return exchange.push(self, remote, force, revs, newbranch)
1692
1701
1693 def stream_in(self, remote, requirements):
1702 def stream_in(self, remote, requirements):
1694 lock = self.lock()
1703 lock = self.lock()
1695 try:
1704 try:
1696 # Save remote branchmap. We will use it later
1705 # Save remote branchmap. We will use it later
1697 # to speed up branchcache creation
1706 # to speed up branchcache creation
1698 rbranchmap = None
1707 rbranchmap = None
1699 if remote.capable("branchmap"):
1708 if remote.capable("branchmap"):
1700 rbranchmap = remote.branchmap()
1709 rbranchmap = remote.branchmap()
1701
1710
1702 fp = remote.stream_out()
1711 fp = remote.stream_out()
1703 l = fp.readline()
1712 l = fp.readline()
1704 try:
1713 try:
1705 resp = int(l)
1714 resp = int(l)
1706 except ValueError:
1715 except ValueError:
1707 raise error.ResponseError(
1716 raise error.ResponseError(
1708 _('unexpected response from remote server:'), l)
1717 _('unexpected response from remote server:'), l)
1709 if resp == 1:
1718 if resp == 1:
1710 raise util.Abort(_('operation forbidden by server'))
1719 raise util.Abort(_('operation forbidden by server'))
1711 elif resp == 2:
1720 elif resp == 2:
1712 raise util.Abort(_('locking the remote repository failed'))
1721 raise util.Abort(_('locking the remote repository failed'))
1713 elif resp != 0:
1722 elif resp != 0:
1714 raise util.Abort(_('the server sent an unknown error code'))
1723 raise util.Abort(_('the server sent an unknown error code'))
1715 self.ui.status(_('streaming all changes\n'))
1724 self.ui.status(_('streaming all changes\n'))
1716 l = fp.readline()
1725 l = fp.readline()
1717 try:
1726 try:
1718 total_files, total_bytes = map(int, l.split(' ', 1))
1727 total_files, total_bytes = map(int, l.split(' ', 1))
1719 except (ValueError, TypeError):
1728 except (ValueError, TypeError):
1720 raise error.ResponseError(
1729 raise error.ResponseError(
1721 _('unexpected response from remote server:'), l)
1730 _('unexpected response from remote server:'), l)
1722 self.ui.status(_('%d files to transfer, %s of data\n') %
1731 self.ui.status(_('%d files to transfer, %s of data\n') %
1723 (total_files, util.bytecount(total_bytes)))
1732 (total_files, util.bytecount(total_bytes)))
1724 handled_bytes = 0
1733 handled_bytes = 0
1725 self.ui.progress(_('clone'), 0, total=total_bytes)
1734 self.ui.progress(_('clone'), 0, total=total_bytes)
1726 start = time.time()
1735 start = time.time()
1727
1736
1728 tr = self.transaction(_('clone'))
1737 tr = self.transaction(_('clone'))
1729 try:
1738 try:
1730 for i in xrange(total_files):
1739 for i in xrange(total_files):
1731 # XXX doesn't support '\n' or '\r' in filenames
1740 # XXX doesn't support '\n' or '\r' in filenames
1732 l = fp.readline()
1741 l = fp.readline()
1733 try:
1742 try:
1734 name, size = l.split('\0', 1)
1743 name, size = l.split('\0', 1)
1735 size = int(size)
1744 size = int(size)
1736 except (ValueError, TypeError):
1745 except (ValueError, TypeError):
1737 raise error.ResponseError(
1746 raise error.ResponseError(
1738 _('unexpected response from remote server:'), l)
1747 _('unexpected response from remote server:'), l)
1739 if self.ui.debugflag:
1748 if self.ui.debugflag:
1740 self.ui.debug('adding %s (%s)\n' %
1749 self.ui.debug('adding %s (%s)\n' %
1741 (name, util.bytecount(size)))
1750 (name, util.bytecount(size)))
1742 # for backwards compat, name was partially encoded
1751 # for backwards compat, name was partially encoded
1743 ofp = self.sopener(store.decodedir(name), 'w')
1752 ofp = self.sopener(store.decodedir(name), 'w')
1744 for chunk in util.filechunkiter(fp, limit=size):
1753 for chunk in util.filechunkiter(fp, limit=size):
1745 handled_bytes += len(chunk)
1754 handled_bytes += len(chunk)
1746 self.ui.progress(_('clone'), handled_bytes,
1755 self.ui.progress(_('clone'), handled_bytes,
1747 total=total_bytes)
1756 total=total_bytes)
1748 ofp.write(chunk)
1757 ofp.write(chunk)
1749 ofp.close()
1758 ofp.close()
1750 tr.close()
1759 tr.close()
1751 finally:
1760 finally:
1752 tr.release()
1761 tr.release()
1753
1762
1754 # Writing straight to files circumvented the in-memory caches
1763 # Writing straight to files circumvented the in-memory caches
1755 self.invalidate()
1764 self.invalidate()
1756
1765
1757 elapsed = time.time() - start
1766 elapsed = time.time() - start
1758 if elapsed <= 0:
1767 if elapsed <= 0:
1759 elapsed = 0.001
1768 elapsed = 0.001
1760 self.ui.progress(_('clone'), None)
1769 self.ui.progress(_('clone'), None)
1761 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1770 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1762 (util.bytecount(total_bytes), elapsed,
1771 (util.bytecount(total_bytes), elapsed,
1763 util.bytecount(total_bytes / elapsed)))
1772 util.bytecount(total_bytes / elapsed)))
1764
1773
1765 # new requirements = old non-format requirements +
1774 # new requirements = old non-format requirements +
1766 # new format-related
1775 # new format-related
1767 # requirements from the streamed-in repository
1776 # requirements from the streamed-in repository
1768 requirements.update(set(self.requirements) - self.supportedformats)
1777 requirements.update(set(self.requirements) - self.supportedformats)
1769 self._applyrequirements(requirements)
1778 self._applyrequirements(requirements)
1770 self._writerequirements()
1779 self._writerequirements()
1771
1780
1772 if rbranchmap:
1781 if rbranchmap:
1773 rbheads = []
1782 rbheads = []
1774 for bheads in rbranchmap.itervalues():
1783 for bheads in rbranchmap.itervalues():
1775 rbheads.extend(bheads)
1784 rbheads.extend(bheads)
1776
1785
1777 if rbheads:
1786 if rbheads:
1778 rtiprev = max((int(self.changelog.rev(node))
1787 rtiprev = max((int(self.changelog.rev(node))
1779 for node in rbheads))
1788 for node in rbheads))
1780 cache = branchmap.branchcache(rbranchmap,
1789 cache = branchmap.branchcache(rbranchmap,
1781 self[rtiprev].node(),
1790 self[rtiprev].node(),
1782 rtiprev)
1791 rtiprev)
1783 # Try to stick it as low as possible
1792 # Try to stick it as low as possible
1784 # filters above served are unlikely to be fetched from a clone
1793 # filters above served are unlikely to be fetched from a clone
1785 for candidate in ('base', 'immutable', 'served'):
1794 for candidate in ('base', 'immutable', 'served'):
1786 rview = self.filtered(candidate)
1795 rview = self.filtered(candidate)
1787 if cache.validfor(rview):
1796 if cache.validfor(rview):
1788 self._branchcaches[candidate] = cache
1797 self._branchcaches[candidate] = cache
1789 cache.write(rview)
1798 cache.write(rview)
1790 break
1799 break
1791 self.invalidate()
1800 self.invalidate()
1792 return len(self.heads()) + 1
1801 return len(self.heads()) + 1
1793 finally:
1802 finally:
1794 lock.release()
1803 lock.release()
1795
1804
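stream_in() above consumes a simple line-oriented header: a status code, a `<files> <bytes>` line, then one `<name>\0<size>` header plus payload per file. The reader below is a simplified illustration of that framing, not the real client code (it skips the progress reporting and the distinct error codes).

# Simplified reader for the framing consumed by stream_in() above.
def read_stream(fp):
    if int(fp.readline()) != 0:
        raise ValueError('server refused the stream')
    # total_bytes only drives the progress bar in the real implementation
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    for _ in range(total_files):
        name, size = fp.readline().split('\0', 1)
        yield name, fp.read(int(size))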
1796 def clone(self, remote, heads=[], stream=False):
1805 def clone(self, remote, heads=[], stream=False):
1797 '''clone remote repository.
1806 '''clone remote repository.
1798
1807
1799 keyword arguments:
1808 keyword arguments:
1800 heads: list of revs to clone (forces use of pull)
1809 heads: list of revs to clone (forces use of pull)
1801 stream: use streaming clone if possible'''
1810 stream: use streaming clone if possible'''
1802
1811
1803 # now, all clients that can request uncompressed clones can
1812 # now, all clients that can request uncompressed clones can
1804 # read repo formats supported by all servers that can serve
1813 # read repo formats supported by all servers that can serve
1805 # them.
1814 # them.
1806
1815
1807 # if revlog format changes, client will have to check version
1816 # if revlog format changes, client will have to check version
1808 # and format flags on "stream" capability, and use
1817 # and format flags on "stream" capability, and use
1809 # uncompressed only if compatible.
1818 # uncompressed only if compatible.
1810
1819
1811 if not stream:
1820 if not stream:
1812 # if the server explicitly prefers to stream (for fast LANs)
1821 # if the server explicitly prefers to stream (for fast LANs)
1813 stream = remote.capable('stream-preferred')
1822 stream = remote.capable('stream-preferred')
1814
1823
1815 if stream and not heads:
1824 if stream and not heads:
1816 # 'stream' means remote revlog format is revlogv1 only
1825 # 'stream' means remote revlog format is revlogv1 only
1817 if remote.capable('stream'):
1826 if remote.capable('stream'):
1818 return self.stream_in(remote, set(('revlogv1',)))
1827 return self.stream_in(remote, set(('revlogv1',)))
1819 # otherwise, 'streamreqs' contains the remote revlog format
1828 # otherwise, 'streamreqs' contains the remote revlog format
1820 streamreqs = remote.capable('streamreqs')
1829 streamreqs = remote.capable('streamreqs')
1821 if streamreqs:
1830 if streamreqs:
1822 streamreqs = set(streamreqs.split(','))
1831 streamreqs = set(streamreqs.split(','))
1823 # if we support it, stream in and adjust our requirements
1832 # if we support it, stream in and adjust our requirements
1824 if not streamreqs - self.supportedformats:
1833 if not streamreqs - self.supportedformats:
1825 return self.stream_in(remote, streamreqs)
1834 return self.stream_in(remote, streamreqs)
1826 return self.pull(remote, heads)
1835 return self.pull(remote, heads)
1827
1836
1828 def pushkey(self, namespace, key, old, new):
1837 def pushkey(self, namespace, key, old, new):
1829 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1838 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1830 old=old, new=new)
1839 old=old, new=new)
1831 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1840 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1832 ret = pushkey.push(self, namespace, key, old, new)
1841 ret = pushkey.push(self, namespace, key, old, new)
1833 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1842 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1834 ret=ret)
1843 ret=ret)
1835 return ret
1844 return ret
1836
1845
1837 def listkeys(self, namespace):
1846 def listkeys(self, namespace):
1838 self.hook('prelistkeys', throw=True, namespace=namespace)
1847 self.hook('prelistkeys', throw=True, namespace=namespace)
1839 self.ui.debug('listing keys for "%s"\n' % namespace)
1848 self.ui.debug('listing keys for "%s"\n' % namespace)
1840 values = pushkey.list(self, namespace)
1849 values = pushkey.list(self, namespace)
1841 self.hook('listkeys', namespace=namespace, values=values)
1850 self.hook('listkeys', namespace=namespace, values=values)
1842 return values
1851 return values
1843
1852
1844 def debugwireargs(self, one, two, three=None, four=None, five=None):
1853 def debugwireargs(self, one, two, three=None, four=None, five=None):
1845 '''used to test argument passing over the wire'''
1854 '''used to test argument passing over the wire'''
1846 return "%s %s %s %s %s" % (one, two, three, four, five)
1855 return "%s %s %s %s %s" % (one, two, three, four, five)
1847
1856
1848 def savecommitmessage(self, text):
1857 def savecommitmessage(self, text):
1849 fp = self.opener('last-message.txt', 'wb')
1858 fp = self.opener('last-message.txt', 'wb')
1850 try:
1859 try:
1851 fp.write(text)
1860 fp.write(text)
1852 finally:
1861 finally:
1853 fp.close()
1862 fp.close()
1854 return self.pathto(fp.name[len(self.root) + 1:])
1863 return self.pathto(fp.name[len(self.root) + 1:])
1855
1864
1856 # used to avoid circular references so destructors work
1865 # used to avoid circular references so destructors work
1857 def aftertrans(files):
1866 def aftertrans(files):
1858 renamefiles = [tuple(t) for t in files]
1867 renamefiles = [tuple(t) for t in files]
1859 def a():
1868 def a():
1860 for vfs, src, dest in renamefiles:
1869 for vfs, src, dest in renamefiles:
1861 try:
1870 try:
1862 vfs.rename(src, dest)
1871 vfs.rename(src, dest)
1863 except OSError: # journal file does not yet exist
1872 except OSError: # journal file does not yet exist
1864 pass
1873 pass
1865 return a
1874 return a
1866
1875
1867 def undoname(fn):
1876 def undoname(fn):
1868 base, name = os.path.split(fn)
1877 base, name = os.path.split(fn)
1869 assert name.startswith('journal')
1878 assert name.startswith('journal')
1870 return os.path.join(base, name.replace('journal', 'undo', 1))
1879 return os.path.join(base, name.replace('journal', 'undo', 1))
1871
1880
1872 def instance(ui, path, create):
1881 def instance(ui, path, create):
1873 return localrepository(ui, util.urllocalpath(path), create)
1882 return localrepository(ui, util.urllocalpath(path), create)
1874
1883
1875 def islocal(path):
1884 def islocal(path):
1876 return True
1885 return True
@@ -1,778 +1,785 b''
1 # wireproto.py - generic wire protocol support functions
1 # wireproto.py - generic wire protocol support functions
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import urllib, tempfile, os, sys
8 import urllib, tempfile, os, sys
9 from i18n import _
9 from i18n import _
10 from node import bin, hex
10 from node import bin, hex
11 import changegroup as changegroupmod
11 import changegroup as changegroupmod
12 import peer, error, encoding, util, store, exchange
12 import peer, error, encoding, util, store, exchange
13
13
14
14
15 class abstractserverproto(object):
15 class abstractserverproto(object):
16 """abstract class that summarizes the protocol API
16 """abstract class that summarizes the protocol API
17
17
18 Used as reference and documentation.
18 Used as reference and documentation.
19 """
19 """
20
20
21 def getargs(self, args):
21 def getargs(self, args):
22 """return the value for arguments in <args>
22 """return the value for arguments in <args>
23
23
24 returns a list of values (same order as <args>)"""
24 returns a list of values (same order as <args>)"""
25 raise NotImplementedError()
25 raise NotImplementedError()
26
26
27 def getfile(self, fp):
27 def getfile(self, fp):
28 """write the whole content of a file into a file-like object
28 """write the whole content of a file into a file-like object
29
29
30 The file is in the form::
30 The file is in the form::
31
31
32 (<chunk-size>\n<chunk>)+0\n
32 (<chunk-size>\n<chunk>)+0\n
33
33
34 chunk size is the ascii version of the int.
34 chunk size is the ascii version of the int.
35 """
35 """
36 raise NotImplementedError()
36 raise NotImplementedError()
37
37
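The getfile() docstring above documents a `(<chunk-size>\n<chunk>)+0\n` framing. The two helpers below are an illustrative encoder/decoder for that format, written for this note only; they are not part of the wire protocol module.

# Sketch of the framing documented by getfile() above: a sequence of
# "<size>\n<payload>" records terminated by a "0\n" record.
def write_chunked(fp, data, chunksize=4096):
    for start in range(0, len(data), chunksize):
        chunk = data[start:start + chunksize]
        fp.write('%d\n%s' % (len(chunk), chunk))
    fp.write('0\n')

def read_chunked(fp):
    data = []
    while True:
        size = int(fp.readline())
        if not size:
            return ''.join(data)
        data.append(fp.read(size))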
38 def redirect(self):
38 def redirect(self):
39 """may set up interception for stdout and stderr
39 """may set up interception for stdout and stderr
40
40
41 See also the `restore` method."""
41 See also the `restore` method."""
42 raise NotImplementedError()
42 raise NotImplementedError()
43
43
44 # If the `redirect` function does install interception, the `restore`
44 # If the `redirect` function does install interception, the `restore`
45 # function MUST be defined. If interception is not used, this function
45 # function MUST be defined. If interception is not used, this function
46 # MUST NOT be defined.
46 # MUST NOT be defined.
47 #
47 #
48 # left commented here on purpose
48 # left commented here on purpose
49 #
49 #
50 #def restore(self):
50 #def restore(self):
51 # """reinstall previous stdout and stderr and return intercepted stdout
51 # """reinstall previous stdout and stderr and return intercepted stdout
52 # """
52 # """
53 # raise NotImplementedError()
53 # raise NotImplementedError()
54
54
55 def groupchunks(self, cg):
55 def groupchunks(self, cg):
56 """return 4096-byte chunks from a changegroup object
56 """return 4096-byte chunks from a changegroup object
57
57
58 Some protocols may have compressed the contents."""
58 Some protocols may have compressed the contents."""
59 raise NotImplementedError()
59 raise NotImplementedError()
60
60
61 # abstract batching support
61 # abstract batching support
62
62
63 class future(object):
63 class future(object):
64 '''placeholder for a value to be set later'''
64 '''placeholder for a value to be set later'''
65 def set(self, value):
65 def set(self, value):
66 if util.safehasattr(self, 'value'):
66 if util.safehasattr(self, 'value'):
67 raise error.RepoError("future is already set")
67 raise error.RepoError("future is already set")
68 self.value = value
68 self.value = value
69
69
70 class batcher(object):
70 class batcher(object):
71 '''base class for batches of commands submittable in a single request
71 '''base class for batches of commands submittable in a single request
72
72
73 All methods invoked on instances of this class are simply queued and
73 All methods invoked on instances of this class are simply queued and
74 return a future for the result. Once you call submit(), all the queued
74 return a future for the result. Once you call submit(), all the queued
75 calls are performed and the results set in their respective futures.
75 calls are performed and the results set in their respective futures.
76 '''
76 '''
77 def __init__(self):
77 def __init__(self):
78 self.calls = []
78 self.calls = []
79 def __getattr__(self, name):
79 def __getattr__(self, name):
80 def call(*args, **opts):
80 def call(*args, **opts):
81 resref = future()
81 resref = future()
82 self.calls.append((name, args, opts, resref,))
82 self.calls.append((name, args, opts, resref,))
83 return resref
83 return resref
84 return call
84 return call
85 def submit(self):
85 def submit(self):
86 pass
86 pass
87
87
88 class localbatch(batcher):
88 class localbatch(batcher):
89 '''performs the queued calls directly'''
89 '''performs the queued calls directly'''
90 def __init__(self, local):
90 def __init__(self, local):
91 batcher.__init__(self)
91 batcher.__init__(self)
92 self.local = local
92 self.local = local
93 def submit(self):
93 def submit(self):
94 for name, args, opts, resref in self.calls:
94 for name, args, opts, resref in self.calls:
95 resref.set(getattr(self.local, name)(*args, **opts))
95 resref.set(getattr(self.local, name)(*args, **opts))
96
96
97 class remotebatch(batcher):
97 class remotebatch(batcher):
98 '''batches the queued calls; uses as few roundtrips as possible'''
98 '''batches the queued calls; uses as few roundtrips as possible'''
99 def __init__(self, remote):
99 def __init__(self, remote):
100 '''remote must support _submitbatch(encbatch) and
100 '''remote must support _submitbatch(encbatch) and
101 _submitone(op, encargs)'''
101 _submitone(op, encargs)'''
102 batcher.__init__(self)
102 batcher.__init__(self)
103 self.remote = remote
103 self.remote = remote
104 def submit(self):
104 def submit(self):
105 req, rsp = [], []
105 req, rsp = [], []
106 for name, args, opts, resref in self.calls:
106 for name, args, opts, resref in self.calls:
107 mtd = getattr(self.remote, name)
107 mtd = getattr(self.remote, name)
108 batchablefn = getattr(mtd, 'batchable', None)
108 batchablefn = getattr(mtd, 'batchable', None)
109 if batchablefn is not None:
109 if batchablefn is not None:
110 batchable = batchablefn(mtd.im_self, *args, **opts)
110 batchable = batchablefn(mtd.im_self, *args, **opts)
111 encargsorres, encresref = batchable.next()
111 encargsorres, encresref = batchable.next()
112 if encresref:
112 if encresref:
113 req.append((name, encargsorres,))
113 req.append((name, encargsorres,))
114 rsp.append((batchable, encresref, resref,))
114 rsp.append((batchable, encresref, resref,))
115 else:
115 else:
116 resref.set(encargsorres)
116 resref.set(encargsorres)
117 else:
117 else:
118 if req:
118 if req:
119 self._submitreq(req, rsp)
119 self._submitreq(req, rsp)
120 req, rsp = [], []
120 req, rsp = [], []
121 resref.set(mtd(*args, **opts))
121 resref.set(mtd(*args, **opts))
122 if req:
122 if req:
123 self._submitreq(req, rsp)
123 self._submitreq(req, rsp)
124 def _submitreq(self, req, rsp):
124 def _submitreq(self, req, rsp):
125 encresults = self.remote._submitbatch(req)
125 encresults = self.remote._submitbatch(req)
126 for encres, r in zip(encresults, rsp):
126 for encres, r in zip(encresults, rsp):
127 batchable, encresref, resref = r
127 batchable, encresref, resref = r
128 encresref.set(encres)
128 encresref.set(encres)
129 resref.set(batchable.next())
129 resref.set(batchable.next())
130
130
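# Illustration only, not called anywhere in this module: the intended usage of
# the batching API above, shown with `localbatch` and a made-up stand-in peer.
# Against a real wirepeer, `remote.batch()` returns a `remotebatch` and the
# same pattern applies, with submit() costing a single round trip.
def _batchdemo():
    class demopeer(object):          # hypothetical peer, for the sketch only
        def heads(self):
            return ['fake-head']
        def known(self, nodes):
            return [True for n in nodes]
    b = localbatch(demopeer())
    fheads = b.heads()               # queued; a `future` comes back at once
    fknown = b.known(['n1', 'n2'])
    b.submit()                       # every queued call runs, futures get set
    return fheads.value, fknown.value
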
131 def batchable(f):
131 def batchable(f):
132 '''annotation for batchable methods
132 '''annotation for batchable methods
133
133
134 Such methods must implement a coroutine as follows:
134 Such methods must implement a coroutine as follows:
135
135
136 @batchable
136 @batchable
137 def sample(self, one, two=None):
137 def sample(self, one, two=None):
138 # Handle locally computable results first:
138 # Handle locally computable results first:
139 if not one:
139 if not one:
140 yield "a local result", None
140 yield "a local result", None
141 # Build list of encoded arguments suitable for your wire protocol:
141 # Build list of encoded arguments suitable for your wire protocol:
142 encargs = [('one', encode(one),), ('two', encode(two),)]
142 encargs = [('one', encode(one),), ('two', encode(two),)]
143 # Create future for injection of encoded result:
143 # Create future for injection of encoded result:
144 encresref = future()
144 encresref = future()
145 # Return encoded arguments and future:
145 # Return encoded arguments and future:
146 yield encargs, encresref
146 yield encargs, encresref
147 # Assuming the future to be filled with the result from the batched
147 # Assuming the future to be filled with the result from the batched
148 # request now. Decode it:
148 # request now. Decode it:
149 yield decode(encresref.value)
149 yield decode(encresref.value)
150
150
151 The decorator returns a function which wraps this coroutine as a plain
151 The decorator returns a function which wraps this coroutine as a plain
152 method, but adds the original method as an attribute called "batchable",
152 method, but adds the original method as an attribute called "batchable",
153 which is used by remotebatch to split the call into separate encoding and
153 which is used by remotebatch to split the call into separate encoding and
154 decoding phases.
154 decoding phases.
155 '''
155 '''
156 def plain(*args, **opts):
156 def plain(*args, **opts):
157 batchable = f(*args, **opts)
157 batchable = f(*args, **opts)
158 encargsorres, encresref = batchable.next()
158 encargsorres, encresref = batchable.next()
159 if not encresref:
159 if not encresref:
160 return encargsorres # a local result in this case
160 return encargsorres # a local result in this case
161 self = args[0]
161 self = args[0]
162 encresref.set(self._submitone(f.func_name, encargsorres))
162 encresref.set(self._submitone(f.func_name, encargsorres))
163 return batchable.next()
163 return batchable.next()
164 setattr(plain, 'batchable', f)
164 setattr(plain, 'batchable', f)
165 return plain
165 return plain
166
166
167 # list of nodes encoding / decoding
167 # list of nodes encoding / decoding
168
168
169 def decodelist(l, sep=' '):
169 def decodelist(l, sep=' '):
170 if l:
170 if l:
171 return map(bin, l.split(sep))
171 return map(bin, l.split(sep))
172 return []
172 return []
173
173
174 def encodelist(l, sep=' '):
174 def encodelist(l, sep=' '):
175 return sep.join(map(hex, l))
175 return sep.join(map(hex, l))
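
# Illustration only: round-tripping a node list through the helpers above.
# The two 20-byte values are arbitrary, not real changeset ids.
def _nodelistdemo():
    nodes = ['\x00' * 20, '\xff' * 20]
    wire = encodelist(nodes)         # forty '0's, a space, forty 'f's
    assert decodelist(wire) == nodes
    return wire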
176
176
177 # batched call argument encoding
177 # batched call argument encoding
178
178
179 def escapearg(plain):
179 def escapearg(plain):
180 return (plain
180 return (plain
181 .replace(':', '::')
181 .replace(':', '::')
182 .replace(',', ':,')
182 .replace(',', ':,')
183 .replace(';', ':;')
183 .replace(';', ':;')
184 .replace('=', ':='))
184 .replace('=', ':='))
185
185
186 def unescapearg(escaped):
186 def unescapearg(escaped):
187 return (escaped
187 return (escaped
188 .replace(':=', '=')
188 .replace(':=', '=')
189 .replace(':;', ';')
189 .replace(':;', ';')
190 .replace(':,', ',')
190 .replace(':,', ',')
191 .replace('::', ':'))
191 .replace('::', ':'))
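
# Illustration only: the escaping above exists so argument values can safely
# contain the characters the batch protocol itself uses as separators
# (';' between commands, ',' between arguments, '=' between key and value).
def _escapedemo():
    raw = 'key=some,value;with:colons'
    wire = escapearg(raw)            # 'key:=some:,value:;with::colons'
    assert unescapearg(wire) == raw
    return wire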
192
192
193 # client side
193 # client side
194
194
195 class wirepeer(peer.peerrepository):
195 class wirepeer(peer.peerrepository):
196
196
197 def batch(self):
197 def batch(self):
198 return remotebatch(self)
198 return remotebatch(self)
199 def _submitbatch(self, req):
199 def _submitbatch(self, req):
200 cmds = []
200 cmds = []
201 for op, argsdict in req:
201 for op, argsdict in req:
202 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
202 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
203 cmds.append('%s %s' % (op, args))
203 cmds.append('%s %s' % (op, args))
204 rsp = self._call("batch", cmds=';'.join(cmds))
204 rsp = self._call("batch", cmds=';'.join(cmds))
205 return rsp.split(';')
205 return rsp.split(';')
206 def _submitone(self, op, args):
206 def _submitone(self, op, args):
207 return self._call(op, **args)
207 return self._call(op, **args)
208
208
209 @batchable
209 @batchable
210 def lookup(self, key):
210 def lookup(self, key):
211 self.requirecap('lookup', _('look up remote revision'))
211 self.requirecap('lookup', _('look up remote revision'))
212 f = future()
212 f = future()
213 yield {'key': encoding.fromlocal(key)}, f
213 yield {'key': encoding.fromlocal(key)}, f
214 d = f.value
214 d = f.value
215 success, data = d[:-1].split(" ", 1)
215 success, data = d[:-1].split(" ", 1)
216 if int(success):
216 if int(success):
217 yield bin(data)
217 yield bin(data)
218 self._abort(error.RepoError(data))
218 self._abort(error.RepoError(data))
219
219
220 @batchable
220 @batchable
221 def heads(self):
221 def heads(self):
222 f = future()
222 f = future()
223 yield {}, f
223 yield {}, f
224 d = f.value
224 d = f.value
225 try:
225 try:
226 yield decodelist(d[:-1])
226 yield decodelist(d[:-1])
227 except ValueError:
227 except ValueError:
228 self._abort(error.ResponseError(_("unexpected response:"), d))
228 self._abort(error.ResponseError(_("unexpected response:"), d))
229
229
230 @batchable
230 @batchable
231 def known(self, nodes):
231 def known(self, nodes):
232 f = future()
232 f = future()
233 yield {'nodes': encodelist(nodes)}, f
233 yield {'nodes': encodelist(nodes)}, f
234 d = f.value
234 d = f.value
235 try:
235 try:
236 yield [bool(int(f)) for f in d]
236 yield [bool(int(f)) for f in d]
237 except ValueError:
237 except ValueError:
238 self._abort(error.ResponseError(_("unexpected response:"), d))
238 self._abort(error.ResponseError(_("unexpected response:"), d))
239
239
240 @batchable
240 @batchable
241 def branchmap(self):
241 def branchmap(self):
242 f = future()
242 f = future()
243 yield {}, f
243 yield {}, f
244 d = f.value
244 d = f.value
245 try:
245 try:
246 branchmap = {}
246 branchmap = {}
247 for branchpart in d.splitlines():
247 for branchpart in d.splitlines():
248 branchname, branchheads = branchpart.split(' ', 1)
248 branchname, branchheads = branchpart.split(' ', 1)
249 branchname = encoding.tolocal(urllib.unquote(branchname))
249 branchname = encoding.tolocal(urllib.unquote(branchname))
250 branchheads = decodelist(branchheads)
250 branchheads = decodelist(branchheads)
251 branchmap[branchname] = branchheads
251 branchmap[branchname] = branchheads
252 yield branchmap
252 yield branchmap
253 except TypeError:
253 except TypeError:
254 self._abort(error.ResponseError(_("unexpected response:"), d))
254 self._abort(error.ResponseError(_("unexpected response:"), d))
255
255
256 def branches(self, nodes):
256 def branches(self, nodes):
257 n = encodelist(nodes)
257 n = encodelist(nodes)
258 d = self._call("branches", nodes=n)
258 d = self._call("branches", nodes=n)
259 try:
259 try:
260 br = [tuple(decodelist(b)) for b in d.splitlines()]
260 br = [tuple(decodelist(b)) for b in d.splitlines()]
261 return br
261 return br
262 except ValueError:
262 except ValueError:
263 self._abort(error.ResponseError(_("unexpected response:"), d))
263 self._abort(error.ResponseError(_("unexpected response:"), d))
264
264
265 def between(self, pairs):
265 def between(self, pairs):
266 batch = 8 # avoid giant requests
266 batch = 8 # avoid giant requests
267 r = []
267 r = []
268 for i in xrange(0, len(pairs), batch):
268 for i in xrange(0, len(pairs), batch):
269 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
269 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
270 d = self._call("between", pairs=n)
270 d = self._call("between", pairs=n)
271 try:
271 try:
272 r.extend(l and decodelist(l) or [] for l in d.splitlines())
272 r.extend(l and decodelist(l) or [] for l in d.splitlines())
273 except ValueError:
273 except ValueError:
274 self._abort(error.ResponseError(_("unexpected response:"), d))
274 self._abort(error.ResponseError(_("unexpected response:"), d))
275 return r
275 return r
276
276
277 @batchable
277 @batchable
278 def pushkey(self, namespace, key, old, new):
278 def pushkey(self, namespace, key, old, new):
279 if not self.capable('pushkey'):
279 if not self.capable('pushkey'):
280 yield False, None
280 yield False, None
281 f = future()
281 f = future()
282 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
282 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
283 yield {'namespace': encoding.fromlocal(namespace),
283 yield {'namespace': encoding.fromlocal(namespace),
284 'key': encoding.fromlocal(key),
284 'key': encoding.fromlocal(key),
285 'old': encoding.fromlocal(old),
285 'old': encoding.fromlocal(old),
286 'new': encoding.fromlocal(new)}, f
286 'new': encoding.fromlocal(new)}, f
287 d = f.value
287 d = f.value
288 d, output = d.split('\n', 1)
288 d, output = d.split('\n', 1)
289 try:
289 try:
290 d = bool(int(d))
290 d = bool(int(d))
291 except ValueError:
291 except ValueError:
292 raise error.ResponseError(
292 raise error.ResponseError(
293 _('push failed (unexpected response):'), d)
293 _('push failed (unexpected response):'), d)
294 for l in output.splitlines(True):
294 for l in output.splitlines(True):
295 self.ui.status(_('remote: '), l)
295 self.ui.status(_('remote: '), l)
296 yield d
296 yield d
297
297
298 @batchable
298 @batchable
299 def listkeys(self, namespace):
299 def listkeys(self, namespace):
300 if not self.capable('pushkey'):
300 if not self.capable('pushkey'):
301 yield {}, None
301 yield {}, None
302 f = future()
302 f = future()
303 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
303 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
304 yield {'namespace': encoding.fromlocal(namespace)}, f
304 yield {'namespace': encoding.fromlocal(namespace)}, f
305 d = f.value
305 d = f.value
306 r = {}
306 r = {}
307 for l in d.splitlines():
307 for l in d.splitlines():
308 k, v = l.split('\t')
308 k, v = l.split('\t')
309 r[encoding.tolocal(k)] = encoding.tolocal(v)
309 r[encoding.tolocal(k)] = encoding.tolocal(v)
310 yield r
310 yield r
311
311
312 def stream_out(self):
312 def stream_out(self):
313 return self._callstream('stream_out')
313 return self._callstream('stream_out')
314
314
315 def changegroup(self, nodes, kind):
315 def changegroup(self, nodes, kind):
316 n = encodelist(nodes)
316 n = encodelist(nodes)
317 f = self._callcompressable("changegroup", roots=n)
317 f = self._callcompressable("changegroup", roots=n)
318 return changegroupmod.unbundle10(f, 'UN')
318 return changegroupmod.unbundle10(f, 'UN')
319
319
320 def changegroupsubset(self, bases, heads, kind):
320 def changegroupsubset(self, bases, heads, kind):
321 self.requirecap('changegroupsubset', _('look up remote changes'))
321 self.requirecap('changegroupsubset', _('look up remote changes'))
322 bases = encodelist(bases)
322 bases = encodelist(bases)
323 heads = encodelist(heads)
323 heads = encodelist(heads)
324 f = self._callcompressable("changegroupsubset",
324 f = self._callcompressable("changegroupsubset",
325 bases=bases, heads=heads)
325 bases=bases, heads=heads)
326 return changegroupmod.unbundle10(f, 'UN')
326 return changegroupmod.unbundle10(f, 'UN')
327
327
328 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
328 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
329 self.requirecap('getbundle', _('look up remote changes'))
329 self.requirecap('getbundle', _('look up remote changes'))
330 opts = {}
330 opts = {}
331 if heads is not None:
331 if heads is not None:
332 opts['heads'] = encodelist(heads)
332 opts['heads'] = encodelist(heads)
333 if common is not None:
333 if common is not None:
334 opts['common'] = encodelist(common)
334 opts['common'] = encodelist(common)
335 if bundlecaps is not None:
335 if bundlecaps is not None:
336 opts['bundlecaps'] = ','.join(bundlecaps)
336 opts['bundlecaps'] = ','.join(bundlecaps)
337 f = self._callcompressable("getbundle", **opts)
337 f = self._callcompressable("getbundle", **opts)
338 return changegroupmod.unbundle10(f, 'UN')
338 return changegroupmod.unbundle10(f, 'UN')
339
339
340 def unbundle(self, cg, heads, source):
340 def unbundle(self, cg, heads, source):
341 '''Send cg (a readable file-like object representing the
341 '''Send cg (a readable file-like object representing the
342 changegroup to push, typically a chunkbuffer object) to the
342 changegroup to push, typically a chunkbuffer object) to the
343 remote server as a bundle. Return an integer indicating the
343 remote server as a bundle. Return an integer indicating the
344 result of the push (see localrepository.addchangegroup()).'''
344 result of the push (see localrepository.addchangegroup()).'''
345
345
346 if heads != ['force'] and self.capable('unbundlehash'):
346 if heads != ['force'] and self.capable('unbundlehash'):
347 heads = encodelist(['hashed',
347 heads = encodelist(['hashed',
348 util.sha1(''.join(sorted(heads))).digest()])
348 util.sha1(''.join(sorted(heads))).digest()])
349 else:
349 else:
350 heads = encodelist(heads)
350 heads = encodelist(heads)
351
351
352 ret, output = self._callpush("unbundle", cg, heads=heads)
352 ret, output = self._callpush("unbundle", cg, heads=heads)
353 if ret == "":
353 if ret == "":
354 raise error.ResponseError(
354 raise error.ResponseError(
355 _('push failed:'), output)
355 _('push failed:'), output)
356 try:
356 try:
357 ret = int(ret)
357 ret = int(ret)
358 except ValueError:
358 except ValueError:
359 raise error.ResponseError(
359 raise error.ResponseError(
360 _('push failed (unexpected response):'), ret)
360 _('push failed (unexpected response):'), ret)
361
361
362 for l in output.splitlines(True):
362 for l in output.splitlines(True):
363 self.ui.status(_('remote: '), l)
363 self.ui.status(_('remote: '), l)
364 return ret
364 return ret
365
365
366 def debugwireargs(self, one, two, three=None, four=None, five=None):
366 def debugwireargs(self, one, two, three=None, four=None, five=None):
367 # don't pass optional arguments left at their default value
367 # don't pass optional arguments left at their default value
368 opts = {}
368 opts = {}
369 if three is not None:
369 if three is not None:
370 opts['three'] = three
370 opts['three'] = three
371 if four is not None:
371 if four is not None:
372 opts['four'] = four
372 opts['four'] = four
373 return self._call('debugwireargs', one=one, two=two, **opts)
373 return self._call('debugwireargs', one=one, two=two, **opts)
374
374
375 def _call(self, cmd, **args):
375 def _call(self, cmd, **args):
376 """execute <cmd> on the server
376 """execute <cmd> on the server
377
377
378 The command is expected to return a simple string.
378 The command is expected to return a simple string.
379
379
380 returns the server reply as a string."""
380 returns the server reply as a string."""
381 raise NotImplementedError()
381 raise NotImplementedError()
382
382
383 def _callstream(self, cmd, **args):
383 def _callstream(self, cmd, **args):
384 """execute <cmd> on the server
384 """execute <cmd> on the server
385
385
386 The command is expected to return a stream.
386 The command is expected to return a stream.
387
387
388 returns the server reply as a file like object."""
388 returns the server reply as a file like object."""
389 raise NotImplementedError()
389 raise NotImplementedError()
390
390
391 def _callcompressable(self, cmd, **args):
391 def _callcompressable(self, cmd, **args):
392 """execute <cmd> on the server
392 """execute <cmd> on the server
393
393
394 The command is expected to return a stream.
394 The command is expected to return a stream.
395
395
396 The stream may have been compressed by some implementations. This
396 The stream may have been compressed by some implementations. This
397 function takes care of the decompression. This is the only difference
397 function takes care of the decompression. This is the only difference
398 from _callstream.
398 from _callstream.
399
399
400 returns the server reply as a file like object.
400 returns the server reply as a file like object.
401 """
401 """
402 raise NotImplementedError()
402 raise NotImplementedError()
403
403
404 def _callpush(self, cmd, fp, **args):
404 def _callpush(self, cmd, fp, **args):
405 """execute a <cmd> on server
405 """execute a <cmd> on server
406
406
407 The command is expected to be related to a push. Push has a special
407 The command is expected to be related to a push. Push has a special
408 return method.
408 return method.
409
409
410 returns the server reply as a (ret, output) tuple. ret is either
410 returns the server reply as a (ret, output) tuple. ret is either
411 empty (error) or a stringified int.
411 empty (error) or a stringified int.
412 """
412 """
413 raise NotImplementedError()
413 raise NotImplementedError()
414
414
415 def _abort(self, exception):
415 def _abort(self, exception):
416 """clearly abort the wire protocol connection and raise the exception
416 """clearly abort the wire protocol connection and raise the exception
417 """
417 """
418 raise NotImplementedError()
418 raise NotImplementedError()
419
419
420 # server side
420 # server side
421
421
422 # A wire protocol command can either return a string or one of these classes.
422 # A wire protocol command can either return a string or one of these classes.
423 class streamres(object):
423 class streamres(object):
424 """wireproto reply: binary stream
424 """wireproto reply: binary stream
425
425
426 The call was successful and the result is a stream.
426 The call was successful and the result is a stream.
427 Iterate on the `self.gen` attribute to retrieve chunks.
427 Iterate on the `self.gen` attribute to retrieve chunks.
428 """
428 """
429 def __init__(self, gen):
429 def __init__(self, gen):
430 self.gen = gen
430 self.gen = gen
431
431
432 class pushres(object):
432 class pushres(object):
433 """wireproto reply: success with simple integer return
433 """wireproto reply: success with simple integer return
434
434
435 The call was successful and returned an integer contained in `self.res`.
435 The call was successful and returned an integer contained in `self.res`.
436 """
436 """
437 def __init__(self, res):
437 def __init__(self, res):
438 self.res = res
438 self.res = res
439
439
440 class pusherr(object):
440 class pusherr(object):
441 """wireproto reply: failure
441 """wireproto reply: failure
442
442
443 The call failed. The `self.res` attribute contains the error message.
443 The call failed. The `self.res` attribute contains the error message.
444 """
444 """
445 def __init__(self, res):
445 def __init__(self, res):
446 self.res = res
446 self.res = res
447
447
448 class ooberror(object):
448 class ooberror(object):
449 """wireproto reply: failure of a batch of operation
449 """wireproto reply: failure of a batch of operation
450
450
451 Something failed during a batch call. The error message is stored in
451 Something failed during a batch call. The error message is stored in
452 `self.message`.
452 `self.message`.
453 """
453 """
454 def __init__(self, message):
454 def __init__(self, message):
455 self.message = message
455 self.message = message
456
456
457 def dispatch(repo, proto, command):
457 def dispatch(repo, proto, command):
458 repo = repo.filtered("served")
458 repo = repo.filtered("served")
459 func, spec = commands[command]
459 func, spec = commands[command]
460 args = proto.getargs(spec)
460 args = proto.getargs(spec)
461 return func(repo, proto, *args)
461 return func(repo, proto, *args)
462
462
463 def options(cmd, keys, others):
463 def options(cmd, keys, others):
464 opts = {}
464 opts = {}
465 for k in keys:
465 for k in keys:
466 if k in others:
466 if k in others:
467 opts[k] = others[k]
467 opts[k] = others[k]
468 del others[k]
468 del others[k]
469 if others:
469 if others:
470 sys.stderr.write("abort: %s got unexpected arguments %s\n"
470 sys.stderr.write("abort: %s got unexpected arguments %s\n"
471 % (cmd, ",".join(others)))
471 % (cmd, ",".join(others)))
472 return opts
472 return opts
473
473
474 # list of commands
474 # list of commands
475 commands = {}
475 commands = {}
476
476
477 def wireprotocommand(name, args=''):
477 def wireprotocommand(name, args=''):
478 """decorator for wireprotocol command"""
478 """decorator for wireprotocol command"""
479 def register(func):
479 def register(func):
480 commands[name] = (func, args)
480 commands[name] = (func, args)
481 return func
481 return func
482 return register
482 return register
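
# Illustration only, kept commented out so it does not register anything: how
# an extension or a new core command would plug into the table above. The
# 'headcount' name and its handler are made up for this sketch; the second
# argument to wireprotocommand is the argument spec ('*' collects any
# remaining arguments into a dict, as 'batch' and 'getbundle' below show).
#
#@wireprotocommand('headcount')
#def headcount(repo, proto):
#    return '%d\n' % len(repo.heads())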
483
483
484 @wireprotocommand('batch', 'cmds *')
484 @wireprotocommand('batch', 'cmds *')
485 def batch(repo, proto, cmds, others):
485 def batch(repo, proto, cmds, others):
486 repo = repo.filtered("served")
486 repo = repo.filtered("served")
487 res = []
487 res = []
488 for pair in cmds.split(';'):
488 for pair in cmds.split(';'):
489 op, args = pair.split(' ', 1)
489 op, args = pair.split(' ', 1)
490 vals = {}
490 vals = {}
491 for a in args.split(','):
491 for a in args.split(','):
492 if a:
492 if a:
493 n, v = a.split('=')
493 n, v = a.split('=')
494 vals[n] = unescapearg(v)
494 vals[n] = unescapearg(v)
495 func, spec = commands[op]
495 func, spec = commands[op]
496 if spec:
496 if spec:
497 keys = spec.split()
497 keys = spec.split()
498 data = {}
498 data = {}
499 for k in keys:
499 for k in keys:
500 if k == '*':
500 if k == '*':
501 star = {}
501 star = {}
502 for key in vals.keys():
502 for key in vals.keys():
503 if key not in keys:
503 if key not in keys:
504 star[key] = vals[key]
504 star[key] = vals[key]
505 data['*'] = star
505 data['*'] = star
506 else:
506 else:
507 data[k] = vals[k]
507 data[k] = vals[k]
508 result = func(repo, proto, *[data[k] for k in keys])
508 result = func(repo, proto, *[data[k] for k in keys])
509 else:
509 else:
510 result = func(repo, proto)
510 result = func(repo, proto)
511 if isinstance(result, ooberror):
511 if isinstance(result, ooberror):
512 return result
512 return result
513 res.append(escapearg(result))
513 res.append(escapearg(result))
514 return ';'.join(res)
514 return ';'.join(res)
515
515
516 @wireprotocommand('between', 'pairs')
516 @wireprotocommand('between', 'pairs')
517 def between(repo, proto, pairs):
517 def between(repo, proto, pairs):
518 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
518 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
519 r = []
519 r = []
520 for b in repo.between(pairs):
520 for b in repo.between(pairs):
521 r.append(encodelist(b) + "\n")
521 r.append(encodelist(b) + "\n")
522 return "".join(r)
522 return "".join(r)
523
523
524 @wireprotocommand('branchmap')
524 @wireprotocommand('branchmap')
525 def branchmap(repo, proto):
525 def branchmap(repo, proto):
526 branchmap = repo.branchmap()
526 branchmap = repo.branchmap()
527 heads = []
527 heads = []
528 for branch, nodes in branchmap.iteritems():
528 for branch, nodes in branchmap.iteritems():
529 branchname = urllib.quote(encoding.fromlocal(branch))
529 branchname = urllib.quote(encoding.fromlocal(branch))
530 branchnodes = encodelist(nodes)
530 branchnodes = encodelist(nodes)
531 heads.append('%s %s' % (branchname, branchnodes))
531 heads.append('%s %s' % (branchname, branchnodes))
532 return '\n'.join(heads)
532 return '\n'.join(heads)
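
# The reply built above is one line per branch: the URL-quoted branch name,
# a space, then the space-separated hex heads of that branch. This is the
# exact text the client-side branchmap() method earlier in this file takes
# apart again with splitlines() and split(' ', 1).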
533
533
534 @wireprotocommand('branches', 'nodes')
534 @wireprotocommand('branches', 'nodes')
535 def branches(repo, proto, nodes):
535 def branches(repo, proto, nodes):
536 nodes = decodelist(nodes)
536 nodes = decodelist(nodes)
537 r = []
537 r = []
538 for b in repo.branches(nodes):
538 for b in repo.branches(nodes):
539 r.append(encodelist(b) + "\n")
539 r.append(encodelist(b) + "\n")
540 return "".join(r)
540 return "".join(r)
541
541
542
542
543 wireprotocaps = ['lookup', 'changegroupsubset', 'branchmap', 'pushkey',
543 wireprotocaps = ['lookup', 'changegroupsubset', 'branchmap', 'pushkey',
544 'known', 'getbundle', 'unbundlehash', 'batch']
544 'known', 'getbundle', 'unbundlehash', 'batch']
545
545
546 def _capabilities(repo, proto):
546 def _capabilities(repo, proto):
547 """return a list of capabilities for a repo
547 """return a list of capabilities for a repo
548
548
549 This function exists to allow extensions to easily wrap capabilities
549 This function exists to allow extensions to easily wrap capabilities
550 computation
550 computation
551
551
552 - returns a list: easy to alter
552 - returns a list: easy to alter
553 - changes made here will be propagated to both the `capabilities` and
553 - changes made here will be propagated to both the `capabilities` and
554 `hello` commands without any other action needed.
554 `hello` commands without any other action needed.
555 """
555 """
556 # copy to prevent modification of the global list
556 # copy to prevent modification of the global list
557 caps = list(wireprotocaps)
557 caps = list(wireprotocaps)
558 if _allowstream(repo.ui):
558 if _allowstream(repo.ui):
559 if repo.ui.configbool('server', 'preferuncompressed', False):
559 if repo.ui.configbool('server', 'preferuncompressed', False):
560 caps.append('stream-preferred')
560 caps.append('stream-preferred')
561 requiredformats = repo.requirements & repo.supportedformats
561 requiredformats = repo.requirements & repo.supportedformats
562 # if our local revlogs are just revlogv1, add 'stream' cap
562 # if our local revlogs are just revlogv1, add 'stream' cap
563 if not requiredformats - set(('revlogv1',)):
563 if not requiredformats - set(('revlogv1',)):
564 caps.append('stream')
564 caps.append('stream')
565 # otherwise, add 'streamreqs' detailing our local revlog format
565 # otherwise, add 'streamreqs' detailing our local revlog format
566 else:
566 else:
567 caps.append('streamreqs=%s' % ','.join(requiredformats))
567 caps.append('streamreqs=%s' % ','.join(requiredformats))
568 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
568 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
569 caps.append('httpheader=1024')
569 caps.append('httpheader=1024')
570 return caps
570 return caps
571
571
572 # If you are writing an extension and consider wrapping this function, wrap
572 # If you are writing an extension and consider wrapping this function, wrap
573 # `_capabilities` instead.
573 # `_capabilities` instead.
574 @wireprotocommand('capabilities')
574 @wireprotocommand('capabilities')
575 def capabilities(repo, proto):
575 def capabilities(repo, proto):
576 return ' '.join(_capabilities(repo, proto))
576 return ' '.join(_capabilities(repo, proto))
577
577
578 @wireprotocommand('changegroup', 'roots')
578 @wireprotocommand('changegroup', 'roots')
579 def changegroup(repo, proto, roots):
579 def changegroup(repo, proto, roots):
580 nodes = decodelist(roots)
580 nodes = decodelist(roots)
581 cg = changegroupmod.changegroup(repo, nodes, 'serve')
581 cg = changegroupmod.changegroup(repo, nodes, 'serve')
582 return streamres(proto.groupchunks(cg))
582 return streamres(proto.groupchunks(cg))
583
583
584 @wireprotocommand('changegroupsubset', 'bases heads')
584 @wireprotocommand('changegroupsubset', 'bases heads')
585 def changegroupsubset(repo, proto, bases, heads):
585 def changegroupsubset(repo, proto, bases, heads):
586 bases = decodelist(bases)
586 bases = decodelist(bases)
587 heads = decodelist(heads)
587 heads = decodelist(heads)
588 cg = changegroupmod.changegroupsubset(repo, bases, heads, 'serve')
588 cg = changegroupmod.changegroupsubset(repo, bases, heads, 'serve')
589 return streamres(proto.groupchunks(cg))
589 return streamres(proto.groupchunks(cg))
590
590
591 @wireprotocommand('debugwireargs', 'one two *')
591 @wireprotocommand('debugwireargs', 'one two *')
592 def debugwireargs(repo, proto, one, two, others):
592 def debugwireargs(repo, proto, one, two, others):
593 # only accept optional args from the known set
593 # only accept optional args from the known set
594 opts = options('debugwireargs', ['three', 'four'], others)
594 opts = options('debugwireargs', ['three', 'four'], others)
595 return repo.debugwireargs(one, two, **opts)
595 return repo.debugwireargs(one, two, **opts)
596
596
597 @wireprotocommand('getbundle', '*')
597 @wireprotocommand('getbundle', '*')
598 def getbundle(repo, proto, others):
598 def getbundle(repo, proto, others):
599 opts = options('getbundle', ['heads', 'common', 'bundlecaps'], others)
599 opts = options('getbundle', ['heads', 'common', 'bundlecaps'], others)
600 for k, v in opts.iteritems():
600 for k, v in opts.iteritems():
601 if k in ('heads', 'common'):
601 if k in ('heads', 'common'):
602 opts[k] = decodelist(v)
602 opts[k] = decodelist(v)
603 elif k == 'bundlecaps':
603 elif k == 'bundlecaps':
604 opts[k] = set(v.split(','))
604 opts[k] = set(v.split(','))
605 cg = changegroupmod.getbundle(repo, 'serve', **opts)
605 cg = changegroupmod.getbundle(repo, 'serve', **opts)
606 return streamres(proto.groupchunks(cg))
606 return streamres(proto.groupchunks(cg))
607
607
608 @wireprotocommand('heads')
608 @wireprotocommand('heads')
609 def heads(repo, proto):
609 def heads(repo, proto):
610 h = repo.heads()
610 h = repo.heads()
611 return encodelist(h) + "\n"
611 return encodelist(h) + "\n"
612
612
613 @wireprotocommand('hello')
613 @wireprotocommand('hello')
614 def hello(repo, proto):
614 def hello(repo, proto):
615 '''the hello command returns a set of lines describing various
615 '''the hello command returns a set of lines describing various
616 interesting things about the server, in an RFC822-like format.
616 interesting things about the server, in an RFC822-like format.
617 Currently the only one defined is "capabilities", which
617 Currently the only one defined is "capabilities", which
618 consists of a line in the form:
618 consists of a line in the form:
619
619
620 capabilities: space separated list of tokens
620 capabilities: space separated list of tokens
621 '''
621 '''
622 return "capabilities: %s\n" % (capabilities(repo, proto))
622 return "capabilities: %s\n" % (capabilities(repo, proto))
623
623
624 @wireprotocommand('listkeys', 'namespace')
624 @wireprotocommand('listkeys', 'namespace')
625 def listkeys(repo, proto, namespace):
625 def listkeys(repo, proto, namespace):
626 d = repo.listkeys(encoding.tolocal(namespace)).items()
626 d = repo.listkeys(encoding.tolocal(namespace)).items()
627 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
627 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
628 for k, v in d])
628 for k, v in d])
629 return t
629 return t
630
630
631 @wireprotocommand('lookup', 'key')
631 @wireprotocommand('lookup', 'key')
632 def lookup(repo, proto, key):
632 def lookup(repo, proto, key):
633 try:
633 try:
634 k = encoding.tolocal(key)
634 k = encoding.tolocal(key)
635 c = repo[k]
635 c = repo[k]
636 r = c.hex()
636 r = c.hex()
637 success = 1
637 success = 1
638 except Exception, inst:
638 except Exception, inst:
639 r = str(inst)
639 r = str(inst)
640 success = 0
640 success = 0
641 return "%s %s\n" % (success, r)
641 return "%s %s\n" % (success, r)
642
642
643 @wireprotocommand('known', 'nodes *')
643 @wireprotocommand('known', 'nodes *')
644 def known(repo, proto, nodes, others):
644 def known(repo, proto, nodes, others):
645 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
645 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
646
646
647 @wireprotocommand('pushkey', 'namespace key old new')
647 @wireprotocommand('pushkey', 'namespace key old new')
648 def pushkey(repo, proto, namespace, key, old, new):
648 def pushkey(repo, proto, namespace, key, old, new):
649 # compatibility with pre-1.8 clients which were accidentally
649 # compatibility with pre-1.8 clients which were accidentally
650 # sending raw binary nodes rather than utf-8-encoded hex
650 # sending raw binary nodes rather than utf-8-encoded hex
651 if len(new) == 20 and new.encode('string-escape') != new:
651 if len(new) == 20 and new.encode('string-escape') != new:
652 # looks like it could be a binary node
652 # looks like it could be a binary node
653 try:
653 try:
654 new.decode('utf-8')
654 new.decode('utf-8')
655 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
655 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
656 except UnicodeDecodeError:
656 except UnicodeDecodeError:
657 pass # binary, leave unmodified
657 pass # binary, leave unmodified
658 else:
658 else:
659 new = encoding.tolocal(new) # normal path
659 new = encoding.tolocal(new) # normal path
660
660
661 if util.safehasattr(proto, 'restore'):
661 if util.safehasattr(proto, 'restore'):
662
662
663 proto.redirect()
663 proto.redirect()
664
664
665 try:
665 try:
666 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
666 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
667 encoding.tolocal(old), new) or False
667 encoding.tolocal(old), new) or False
668 except util.Abort:
668 except util.Abort:
669 r = False
669 r = False
670
670
671 output = proto.restore()
671 output = proto.restore()
672
672
673 return '%s\n%s' % (int(r), output)
673 return '%s\n%s' % (int(r), output)
674
674
675 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
675 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
676 encoding.tolocal(old), new)
676 encoding.tolocal(old), new)
677 return '%s\n' % int(r)
677 return '%s\n' % int(r)
678
678
679 def _allowstream(ui):
679 def _allowstream(ui):
680 return ui.configbool('server', 'uncompressed', True, untrusted=True)
680 return ui.configbool('server', 'uncompressed', True, untrusted=True)
681
681
682 def _walkstreamfiles(repo):
682 def _walkstreamfiles(repo):
683 # this is its own function so extensions can override it
683 # this is its own function so extensions can override it
684 return repo.store.walk()
684 return repo.store.walk()
685
685
686 @wireprotocommand('stream_out')
686 @wireprotocommand('stream_out')
687 def stream(repo, proto):
687 def stream(repo, proto):
688 '''If the server supports streaming clone, it advertises the "stream"
688 '''If the server supports streaming clone, it advertises the "stream"
689 capability with a value representing the version and flags of the repo
689 capability with a value representing the version and flags of the repo
690 it is serving. Client checks to see if it understands the format.
690 it is serving. Client checks to see if it understands the format.
691
691
692 The format is simple: the server writes out a line with the number
692 The format is simple: the server writes out a line with the number
693 of files, then the total number of bytes to be transferred (separated
693 of files, then the total number of bytes to be transferred (separated
694 by a space). Then, for each file, the server first writes the filename
694 by a space). Then, for each file, the server first writes the filename
695 and filesize (separated by the null character), then the file contents.
695 and filesize (separated by the null character), then the file contents.
696 '''
696 '''
697
697
698 if not _allowstream(repo.ui):
698 if not _allowstream(repo.ui):
699 return '1\n'
699 return '1\n'
700
700
701 entries = []
701 entries = []
702 total_bytes = 0
702 total_bytes = 0
703 try:
703 try:
704 # get consistent snapshot of repo, lock during scan
704 # get consistent snapshot of repo, lock during scan
705 lock = repo.lock()
705 lock = repo.lock()
706 try:
706 try:
707 repo.ui.debug('scanning\n')
707 repo.ui.debug('scanning\n')
708 for name, ename, size in _walkstreamfiles(repo):
708 for name, ename, size in _walkstreamfiles(repo):
709 if size:
709 if size:
710 entries.append((name, size))
710 entries.append((name, size))
711 total_bytes += size
711 total_bytes += size
712 finally:
712 finally:
713 lock.release()
713 lock.release()
714 except error.LockError:
714 except error.LockError:
715 return '2\n' # error: 2
715 return '2\n' # error: 2
716
716
717 def streamer(repo, entries, total):
717 def streamer(repo, entries, total):
718 '''stream out all metadata files in repository.'''
718 '''stream out all metadata files in repository.'''
719 yield '0\n' # success
719 yield '0\n' # success
720 repo.ui.debug('%d files, %d bytes to transfer\n' %
720 repo.ui.debug('%d files, %d bytes to transfer\n' %
721 (len(entries), total_bytes))
721 (len(entries), total_bytes))
722 yield '%d %d\n' % (len(entries), total_bytes)
722 yield '%d %d\n' % (len(entries), total_bytes)
723
723
724 sopener = repo.sopener
724 sopener = repo.sopener
725 oldaudit = sopener.mustaudit
725 oldaudit = sopener.mustaudit
726 debugflag = repo.ui.debugflag
726 debugflag = repo.ui.debugflag
727 sopener.mustaudit = False
727 sopener.mustaudit = False
728
728
729 try:
729 try:
730 for name, size in entries:
730 for name, size in entries:
731 if debugflag:
731 if debugflag:
732 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
732 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
733 # partially encode name over the wire for backwards compat
733 # partially encode name over the wire for backwards compat
734 yield '%s\0%d\n' % (store.encodedir(name), size)
734 yield '%s\0%d\n' % (store.encodedir(name), size)
735 if size <= 65536:
735 if size <= 65536:
736 fp = sopener(name)
736 fp = sopener(name)
737 try:
737 try:
738 data = fp.read(size)
738 data = fp.read(size)
739 finally:
739 finally:
740 fp.close()
740 fp.close()
741 yield data
741 yield data
742 else:
742 else:
743 for chunk in util.filechunkiter(sopener(name), limit=size):
743 for chunk in util.filechunkiter(sopener(name), limit=size):
744 yield chunk
744 yield chunk
745 # replace with "finally:" when support for python 2.4 has been dropped
745 # replace with "finally:" when support for python 2.4 has been dropped
746 except Exception:
746 except Exception:
747 sopener.mustaudit = oldaudit
747 sopener.mustaudit = oldaudit
748 raise
748 raise
749 sopener.mustaudit = oldaudit
749 sopener.mustaudit = oldaudit
750
750
751 return streamres(streamer(repo, entries, total_bytes))
751 return streamres(streamer(repo, entries, total_bytes))
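
# Illustration only, not used anywhere in this module: how a client could walk
# the payload produced by streamer() above (error handling for the '1\n' and
# '2\n' replies is omitted). `resp` is the file-like reply object.
def _readstreamdemo(resp):
    resp.readline()                           # status line, '0\n' on success
    filecount = int(resp.readline().split(' ', 1)[0])
    for i in xrange(filecount):
        name, size = resp.readline()[:-1].rsplit('\0', 1)
        yield store.decodedir(name), resp.read(int(size))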
752
752
753 @wireprotocommand('unbundle', 'heads')
753 @wireprotocommand('unbundle', 'heads')
754 def unbundle(repo, proto, heads):
754 def unbundle(repo, proto, heads):
755 their_heads = decodelist(heads)
755 their_heads = decodelist(heads)
756
756
757 try:
757 try:
758 proto.redirect()
758 proto.redirect()
759
759
760 exchange.check_heads(repo, their_heads, 'preparing changes')
760 exchange.check_heads(repo, their_heads, 'preparing changes')
761
761
762 # write bundle data to temporary file because it can be big
762 # write bundle data to temporary file because it can be big
763 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
763 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
764 fp = os.fdopen(fd, 'wb+')
764 fp = os.fdopen(fd, 'wb+')
765 r = 0
765 r = 0
766 try:
766 try:
767 proto.getfile(fp)
767 proto.getfile(fp)
768 fp.seek(0)
768 fp.seek(0)
769 gen = changegroupmod.readbundle(fp, None)
769 gen = changegroupmod.readbundle(fp, None)
770 r = exchange.unbundle(repo, gen, their_heads, 'serve',
770 r = exchange.unbundle(repo, gen, their_heads, 'serve',
771 proto._client())
771 proto._client())
772 return pushres(r)
772 return pushres(r)
773
773
774 finally:
774 finally:
775 fp.close()
775 fp.close()
776 os.unlink(tempname)
776 os.unlink(tempname)
777 except util.Abort, inst:
778 # The old code we moved used sys.stderr directly.
779 # We did not change it, to minimise the code change.
780 # This needs to be moved to something proper.
781 # Feel free to do it.
782 sys.stderr.write("abort: %s\n" % inst)
783 return pushres(0)
777 except exchange.PushRaced, exc:
784 except exchange.PushRaced, exc:
778 return pusherr(str(exc))
785 return pusherr(str(exc))
@@ -1,2139 +1,2139 b''
1 > do_push()
1 > do_push()
2 > {
2 > {
3 > user=$1
3 > user=$1
4 > shift
4 > shift
5 > echo "Pushing as user $user"
5 > echo "Pushing as user $user"
6 > echo 'hgrc = """'
6 > echo 'hgrc = """'
7 > sed -e 1,2d b/.hg/hgrc | grep -v fakegroups.py
7 > sed -e 1,2d b/.hg/hgrc | grep -v fakegroups.py
8 > echo '"""'
8 > echo '"""'
9 > if test -f acl.config; then
9 > if test -f acl.config; then
10 > echo 'acl.config = """'
10 > echo 'acl.config = """'
11 > cat acl.config
11 > cat acl.config
12 > echo '"""'
12 > echo '"""'
13 > fi
13 > fi
14 > # On AIX /etc/profile sets LOGNAME read-only. So
14 > # On AIX /etc/profile sets LOGNAME read-only. So
15 > # LOGNAME=$user hg --cws a --debug push ../b
15 > # LOGNAME=$user hg --cws a --debug push ../b
16 > # fails with "This variable is read only."
16 > # fails with "This variable is read only."
17 > # Use env to work around this.
17 > # Use env to work around this.
18 > env LOGNAME=$user hg --cwd a --debug push ../b
18 > env LOGNAME=$user hg --cwd a --debug push ../b
19 > hg --cwd b rollback
19 > hg --cwd b rollback
20 > hg --cwd b --quiet tip
20 > hg --cwd b --quiet tip
21 > echo
21 > echo
22 > }
22 > }
23
23
24 > init_config()
24 > init_config()
25 > {
25 > {
26 > cat > fakegroups.py <<EOF
26 > cat > fakegroups.py <<EOF
27 > from hgext import acl
27 > from hgext import acl
28 > def fakegetusers(ui, group):
28 > def fakegetusers(ui, group):
29 > try:
29 > try:
30 > return acl._getusersorig(ui, group)
30 > return acl._getusersorig(ui, group)
31 > except:
31 > except:
32 > return ["fred", "betty"]
32 > return ["fred", "betty"]
33 > acl._getusersorig = acl._getusers
33 > acl._getusersorig = acl._getusers
34 > acl._getusers = fakegetusers
34 > acl._getusers = fakegetusers
35 > EOF
35 > EOF
36 > rm -f acl.config
36 > rm -f acl.config
37 > cat > $config <<EOF
37 > cat > $config <<EOF
38 > [hooks]
38 > [hooks]
39 > pretxnchangegroup.acl = python:hgext.acl.hook
39 > pretxnchangegroup.acl = python:hgext.acl.hook
40 > [acl]
40 > [acl]
41 > sources = push
41 > sources = push
42 > [extensions]
42 > [extensions]
43 > f=`pwd`/fakegroups.py
43 > f=`pwd`/fakegroups.py
44 > EOF
44 > EOF
45 > }
45 > }
46
46
47 $ hg init a
47 $ hg init a
48 $ cd a
48 $ cd a
49 $ mkdir foo foo/Bar quux
49 $ mkdir foo foo/Bar quux
50 $ echo 'in foo' > foo/file.txt
50 $ echo 'in foo' > foo/file.txt
51 $ echo 'in foo/Bar' > foo/Bar/file.txt
51 $ echo 'in foo/Bar' > foo/Bar/file.txt
52 $ echo 'in quux' > quux/file.py
52 $ echo 'in quux' > quux/file.py
53 $ hg add -q
53 $ hg add -q
54 $ hg ci -m 'add files' -d '1000000 0'
54 $ hg ci -m 'add files' -d '1000000 0'
55 $ echo >> foo/file.txt
55 $ echo >> foo/file.txt
56 $ hg ci -m 'change foo/file' -d '1000001 0'
56 $ hg ci -m 'change foo/file' -d '1000001 0'
57 $ echo >> foo/Bar/file.txt
57 $ echo >> foo/Bar/file.txt
58 $ hg ci -m 'change foo/Bar/file' -d '1000002 0'
58 $ hg ci -m 'change foo/Bar/file' -d '1000002 0'
59 $ echo >> quux/file.py
59 $ echo >> quux/file.py
60 $ hg ci -m 'change quux/file' -d '1000003 0'
60 $ hg ci -m 'change quux/file' -d '1000003 0'
61 $ hg tip --quiet
61 $ hg tip --quiet
62 3:911600dab2ae
62 3:911600dab2ae
63
63
64 $ cd ..
64 $ cd ..
65 $ hg clone -r 0 a b
65 $ hg clone -r 0 a b
66 adding changesets
66 adding changesets
67 adding manifests
67 adding manifests
68 adding file changes
68 adding file changes
69 added 1 changesets with 3 changes to 3 files
69 added 1 changesets with 3 changes to 3 files
70 updating to branch default
70 updating to branch default
71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
72
73 $ config=b/.hg/hgrc
73 $ config=b/.hg/hgrc
74
74
75 Extension disabled for lack of a hook
75 Extension disabled for lack of a hook
76
76
77 $ do_push fred
77 $ do_push fred
78 Pushing as user fred
78 Pushing as user fred
79 hgrc = """
79 hgrc = """
80 """
80 """
81 pushing to ../b
81 pushing to ../b
82 query 1; heads
82 query 1; heads
83 searching for changes
83 searching for changes
84 all remote heads known locally
84 all remote heads known locally
85 listing keys for "bookmarks"
85 listing keys for "bookmarks"
86 3 changesets found
86 3 changesets found
87 list of changesets:
87 list of changesets:
88 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
88 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
89 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
89 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
90 911600dab2ae7a9baff75958b84fe606851ce955
90 911600dab2ae7a9baff75958b84fe606851ce955
91 adding changesets
91 adding changesets
92 bundling: 1/3 changesets (33.33%)
92 bundling: 1/3 changesets (33.33%)
93 bundling: 2/3 changesets (66.67%)
93 bundling: 2/3 changesets (66.67%)
94 bundling: 3/3 changesets (100.00%)
94 bundling: 3/3 changesets (100.00%)
95 bundling: 1/3 manifests (33.33%)
95 bundling: 1/3 manifests (33.33%)
96 bundling: 2/3 manifests (66.67%)
96 bundling: 2/3 manifests (66.67%)
97 bundling: 3/3 manifests (100.00%)
97 bundling: 3/3 manifests (100.00%)
98 bundling: foo/Bar/file.txt 1/3 files (33.33%)
98 bundling: foo/Bar/file.txt 1/3 files (33.33%)
99 bundling: foo/file.txt 2/3 files (66.67%)
99 bundling: foo/file.txt 2/3 files (66.67%)
100 bundling: quux/file.py 3/3 files (100.00%)
100 bundling: quux/file.py 3/3 files (100.00%)
101 changesets: 1 chunks
101 changesets: 1 chunks
102 add changeset ef1ea85a6374
102 add changeset ef1ea85a6374
103 changesets: 2 chunks
103 changesets: 2 chunks
104 add changeset f9cafe1212c8
104 add changeset f9cafe1212c8
105 changesets: 3 chunks
105 changesets: 3 chunks
106 add changeset 911600dab2ae
106 add changeset 911600dab2ae
107 adding manifests
107 adding manifests
108 manifests: 1/3 chunks (33.33%)
108 manifests: 1/3 chunks (33.33%)
109 manifests: 2/3 chunks (66.67%)
109 manifests: 2/3 chunks (66.67%)
110 manifests: 3/3 chunks (100.00%)
110 manifests: 3/3 chunks (100.00%)
111 adding file changes
111 adding file changes
112 adding foo/Bar/file.txt revisions
112 adding foo/Bar/file.txt revisions
113 files: 1/3 chunks (33.33%)
113 files: 1/3 chunks (33.33%)
114 adding foo/file.txt revisions
114 adding foo/file.txt revisions
115 files: 2/3 chunks (66.67%)
115 files: 2/3 chunks (66.67%)
116 adding quux/file.py revisions
116 adding quux/file.py revisions
117 files: 3/3 chunks (100.00%)
117 files: 3/3 chunks (100.00%)
118 added 3 changesets with 3 changes to 3 files
118 added 3 changesets with 3 changes to 3 files
119 updating the branch cache
119 listing keys for "phases"
120 listing keys for "phases"
120 try to push obsolete markers to remote
121 try to push obsolete markers to remote
121 updating the branch cache
122 checking for updated bookmarks
122 checking for updated bookmarks
123 listing keys for "bookmarks"
123 listing keys for "bookmarks"
124 repository tip rolled back to revision 0 (undo push)
124 repository tip rolled back to revision 0 (undo push)
125 0:6675d58eff77
125 0:6675d58eff77
126
126
127
127
128 $ echo '[hooks]' >> $config
128 $ echo '[hooks]' >> $config
129 $ echo 'pretxnchangegroup.acl = python:hgext.acl.hook' >> $config
129 $ echo 'pretxnchangegroup.acl = python:hgext.acl.hook' >> $config
130
130
131 Extension disabled for lack of acl.sources
131 Extension disabled for lack of acl.sources
132
132
133 $ do_push fred
133 $ do_push fred
134 Pushing as user fred
134 Pushing as user fred
135 hgrc = """
135 hgrc = """
136 [hooks]
136 [hooks]
137 pretxnchangegroup.acl = python:hgext.acl.hook
137 pretxnchangegroup.acl = python:hgext.acl.hook
138 """
138 """
139 pushing to ../b
139 pushing to ../b
140 query 1; heads
140 query 1; heads
141 searching for changes
141 searching for changes
142 all remote heads known locally
142 all remote heads known locally
143 invalid branchheads cache (served): tip differs
143 invalid branchheads cache (served): tip differs
144 listing keys for "bookmarks"
144 listing keys for "bookmarks"
145 3 changesets found
145 3 changesets found
146 list of changesets:
146 list of changesets:
147 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
147 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
148 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
148 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
149 911600dab2ae7a9baff75958b84fe606851ce955
149 911600dab2ae7a9baff75958b84fe606851ce955
150 adding changesets
150 adding changesets
151 bundling: 1/3 changesets (33.33%)
151 bundling: 1/3 changesets (33.33%)
152 bundling: 2/3 changesets (66.67%)
152 bundling: 2/3 changesets (66.67%)
153 bundling: 3/3 changesets (100.00%)
153 bundling: 3/3 changesets (100.00%)
154 bundling: 1/3 manifests (33.33%)
154 bundling: 1/3 manifests (33.33%)
155 bundling: 2/3 manifests (66.67%)
155 bundling: 2/3 manifests (66.67%)
156 bundling: 3/3 manifests (100.00%)
156 bundling: 3/3 manifests (100.00%)
157 bundling: foo/Bar/file.txt 1/3 files (33.33%)
157 bundling: foo/Bar/file.txt 1/3 files (33.33%)
158 bundling: foo/file.txt 2/3 files (66.67%)
158 bundling: foo/file.txt 2/3 files (66.67%)
159 bundling: quux/file.py 3/3 files (100.00%)
159 bundling: quux/file.py 3/3 files (100.00%)
160 changesets: 1 chunks
160 changesets: 1 chunks
161 add changeset ef1ea85a6374
161 add changeset ef1ea85a6374
162 changesets: 2 chunks
162 changesets: 2 chunks
163 add changeset f9cafe1212c8
163 add changeset f9cafe1212c8
164 changesets: 3 chunks
164 changesets: 3 chunks
165 add changeset 911600dab2ae
165 add changeset 911600dab2ae
166 adding manifests
166 adding manifests
167 manifests: 1/3 chunks (33.33%)
167 manifests: 1/3 chunks (33.33%)
168 manifests: 2/3 chunks (66.67%)
168 manifests: 2/3 chunks (66.67%)
169 manifests: 3/3 chunks (100.00%)
169 manifests: 3/3 chunks (100.00%)
170 adding file changes
170 adding file changes
171 adding foo/Bar/file.txt revisions
171 adding foo/Bar/file.txt revisions
172 files: 1/3 chunks (33.33%)
172 files: 1/3 chunks (33.33%)
173 adding foo/file.txt revisions
173 adding foo/file.txt revisions
174 files: 2/3 chunks (66.67%)
174 files: 2/3 chunks (66.67%)
175 adding quux/file.py revisions
175 adding quux/file.py revisions
176 files: 3/3 chunks (100.00%)
176 files: 3/3 chunks (100.00%)
177 added 3 changesets with 3 changes to 3 files
177 added 3 changesets with 3 changes to 3 files
178 calling hook pretxnchangegroup.acl: hgext.acl.hook
178 calling hook pretxnchangegroup.acl: hgext.acl.hook
179 acl: changes have source "push" - skipping
179 acl: changes have source "push" - skipping
180 updating the branch cache
180 listing keys for "phases"
181 listing keys for "phases"
181 try to push obsolete markers to remote
182 try to push obsolete markers to remote
182 updating the branch cache
183 checking for updated bookmarks
183 checking for updated bookmarks
184 listing keys for "bookmarks"
184 listing keys for "bookmarks"
185 repository tip rolled back to revision 0 (undo push)
185 repository tip rolled back to revision 0 (undo push)
186 0:6675d58eff77
186 0:6675d58eff77
187
187
188
188
189 No [acl.allow]/[acl.deny]
189 No [acl.allow]/[acl.deny]
190
190
191 $ echo '[acl]' >> $config
191 $ echo '[acl]' >> $config
192 $ echo 'sources = push' >> $config
192 $ echo 'sources = push' >> $config
193 $ do_push fred
193 $ do_push fred
194 Pushing as user fred
194 Pushing as user fred
195 hgrc = """
195 hgrc = """
196 [hooks]
196 [hooks]
197 pretxnchangegroup.acl = python:hgext.acl.hook
197 pretxnchangegroup.acl = python:hgext.acl.hook
198 [acl]
198 [acl]
199 sources = push
199 sources = push
200 """
200 """
201 pushing to ../b
201 pushing to ../b
202 query 1; heads
202 query 1; heads
203 searching for changes
203 searching for changes
204 all remote heads known locally
204 all remote heads known locally
205 invalid branchheads cache (served): tip differs
205 invalid branchheads cache (served): tip differs
206 listing keys for "bookmarks"
206 listing keys for "bookmarks"
207 3 changesets found
207 3 changesets found
208 list of changesets:
208 list of changesets:
209 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
209 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
210 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
210 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
211 911600dab2ae7a9baff75958b84fe606851ce955
211 911600dab2ae7a9baff75958b84fe606851ce955
212 adding changesets
212 adding changesets
213 bundling: 1/3 changesets (33.33%)
213 bundling: 1/3 changesets (33.33%)
214 bundling: 2/3 changesets (66.67%)
214 bundling: 2/3 changesets (66.67%)
215 bundling: 3/3 changesets (100.00%)
215 bundling: 3/3 changesets (100.00%)
216 bundling: 1/3 manifests (33.33%)
216 bundling: 1/3 manifests (33.33%)
217 bundling: 2/3 manifests (66.67%)
217 bundling: 2/3 manifests (66.67%)
218 bundling: 3/3 manifests (100.00%)
218 bundling: 3/3 manifests (100.00%)
219 bundling: foo/Bar/file.txt 1/3 files (33.33%)
219 bundling: foo/Bar/file.txt 1/3 files (33.33%)
220 bundling: foo/file.txt 2/3 files (66.67%)
220 bundling: foo/file.txt 2/3 files (66.67%)
221 bundling: quux/file.py 3/3 files (100.00%)
221 bundling: quux/file.py 3/3 files (100.00%)
222 changesets: 1 chunks
222 changesets: 1 chunks
223 add changeset ef1ea85a6374
223 add changeset ef1ea85a6374
224 changesets: 2 chunks
224 changesets: 2 chunks
225 add changeset f9cafe1212c8
225 add changeset f9cafe1212c8
226 changesets: 3 chunks
226 changesets: 3 chunks
227 add changeset 911600dab2ae
227 add changeset 911600dab2ae
228 adding manifests
228 adding manifests
229 manifests: 1/3 chunks (33.33%)
229 manifests: 1/3 chunks (33.33%)
230 manifests: 2/3 chunks (66.67%)
230 manifests: 2/3 chunks (66.67%)
231 manifests: 3/3 chunks (100.00%)
231 manifests: 3/3 chunks (100.00%)
232 adding file changes
232 adding file changes
233 adding foo/Bar/file.txt revisions
233 adding foo/Bar/file.txt revisions
234 files: 1/3 chunks (33.33%)
234 files: 1/3 chunks (33.33%)
235 adding foo/file.txt revisions
235 adding foo/file.txt revisions
236 files: 2/3 chunks (66.67%)
236 files: 2/3 chunks (66.67%)
237 adding quux/file.py revisions
237 adding quux/file.py revisions
238 files: 3/3 chunks (100.00%)
238 files: 3/3 chunks (100.00%)
239 added 3 changesets with 3 changes to 3 files
239 added 3 changesets with 3 changes to 3 files
240 calling hook pretxnchangegroup.acl: hgext.acl.hook
240 calling hook pretxnchangegroup.acl: hgext.acl.hook
241 acl: checking access for user "fred"
241 acl: checking access for user "fred"
242 acl: acl.allow.branches not enabled
242 acl: acl.allow.branches not enabled
243 acl: acl.deny.branches not enabled
243 acl: acl.deny.branches not enabled
244 acl: acl.allow not enabled
244 acl: acl.allow not enabled
245 acl: acl.deny not enabled
245 acl: acl.deny not enabled
246 acl: branch access granted: "ef1ea85a6374" on branch "default"
246 acl: branch access granted: "ef1ea85a6374" on branch "default"
247 acl: path access granted: "ef1ea85a6374"
247 acl: path access granted: "ef1ea85a6374"
248 acl: branch access granted: "f9cafe1212c8" on branch "default"
248 acl: branch access granted: "f9cafe1212c8" on branch "default"
249 acl: path access granted: "f9cafe1212c8"
249 acl: path access granted: "f9cafe1212c8"
250 acl: branch access granted: "911600dab2ae" on branch "default"
250 acl: branch access granted: "911600dab2ae" on branch "default"
251 acl: path access granted: "911600dab2ae"
251 acl: path access granted: "911600dab2ae"
252 updating the branch cache
252 listing keys for "phases"
253 listing keys for "phases"
253 try to push obsolete markers to remote
254 try to push obsolete markers to remote
254 updating the branch cache
255 checking for updated bookmarks
255 checking for updated bookmarks
256 listing keys for "bookmarks"
256 listing keys for "bookmarks"
257 repository tip rolled back to revision 0 (undo push)
257 repository tip rolled back to revision 0 (undo push)
258 0:6675d58eff77
258 0:6675d58eff77
259
259
260
260
261 Empty [acl.allow]
261 Empty [acl.allow]
262
262
263 $ echo '[acl.allow]' >> $config
263 $ echo '[acl.allow]' >> $config
264 $ do_push fred
264 $ do_push fred
265 Pushing as user fred
265 Pushing as user fred
266 hgrc = """
266 hgrc = """
267 [hooks]
267 [hooks]
268 pretxnchangegroup.acl = python:hgext.acl.hook
268 pretxnchangegroup.acl = python:hgext.acl.hook
269 [acl]
269 [acl]
270 sources = push
270 sources = push
271 [acl.allow]
271 [acl.allow]
272 """
272 """
273 pushing to ../b
273 pushing to ../b
274 query 1; heads
274 query 1; heads
275 searching for changes
275 searching for changes
276 all remote heads known locally
276 all remote heads known locally
277 invalid branchheads cache (served): tip differs
277 invalid branchheads cache (served): tip differs
278 listing keys for "bookmarks"
278 listing keys for "bookmarks"
279 3 changesets found
279 3 changesets found
280 list of changesets:
280 list of changesets:
281 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
281 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
282 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
282 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
283 911600dab2ae7a9baff75958b84fe606851ce955
283 911600dab2ae7a9baff75958b84fe606851ce955
284 adding changesets
284 adding changesets
285 bundling: 1/3 changesets (33.33%)
285 bundling: 1/3 changesets (33.33%)
286 bundling: 2/3 changesets (66.67%)
286 bundling: 2/3 changesets (66.67%)
287 bundling: 3/3 changesets (100.00%)
287 bundling: 3/3 changesets (100.00%)
288 bundling: 1/3 manifests (33.33%)
288 bundling: 1/3 manifests (33.33%)
289 bundling: 2/3 manifests (66.67%)
289 bundling: 2/3 manifests (66.67%)
290 bundling: 3/3 manifests (100.00%)
290 bundling: 3/3 manifests (100.00%)
291 bundling: foo/Bar/file.txt 1/3 files (33.33%)
291 bundling: foo/Bar/file.txt 1/3 files (33.33%)
292 bundling: foo/file.txt 2/3 files (66.67%)
292 bundling: foo/file.txt 2/3 files (66.67%)
293 bundling: quux/file.py 3/3 files (100.00%)
293 bundling: quux/file.py 3/3 files (100.00%)
294 changesets: 1 chunks
294 changesets: 1 chunks
295 add changeset ef1ea85a6374
295 add changeset ef1ea85a6374
296 changesets: 2 chunks
296 changesets: 2 chunks
297 add changeset f9cafe1212c8
297 add changeset f9cafe1212c8
298 changesets: 3 chunks
298 changesets: 3 chunks
299 add changeset 911600dab2ae
299 add changeset 911600dab2ae
300 adding manifests
300 adding manifests
301 manifests: 1/3 chunks (33.33%)
301 manifests: 1/3 chunks (33.33%)
302 manifests: 2/3 chunks (66.67%)
302 manifests: 2/3 chunks (66.67%)
303 manifests: 3/3 chunks (100.00%)
303 manifests: 3/3 chunks (100.00%)
304 adding file changes
304 adding file changes
305 adding foo/Bar/file.txt revisions
305 adding foo/Bar/file.txt revisions
306 files: 1/3 chunks (33.33%)
306 files: 1/3 chunks (33.33%)
307 adding foo/file.txt revisions
307 adding foo/file.txt revisions
308 files: 2/3 chunks (66.67%)
308 files: 2/3 chunks (66.67%)
309 adding quux/file.py revisions
309 adding quux/file.py revisions
310 files: 3/3 chunks (100.00%)
310 files: 3/3 chunks (100.00%)
311 added 3 changesets with 3 changes to 3 files
311 added 3 changesets with 3 changes to 3 files
312 calling hook pretxnchangegroup.acl: hgext.acl.hook
312 calling hook pretxnchangegroup.acl: hgext.acl.hook
313 acl: checking access for user "fred"
313 acl: checking access for user "fred"
314 acl: acl.allow.branches not enabled
314 acl: acl.allow.branches not enabled
315 acl: acl.deny.branches not enabled
315 acl: acl.deny.branches not enabled
316 acl: acl.allow enabled, 0 entries for user fred
316 acl: acl.allow enabled, 0 entries for user fred
317 acl: acl.deny not enabled
317 acl: acl.deny not enabled
318 acl: branch access granted: "ef1ea85a6374" on branch "default"
318 acl: branch access granted: "ef1ea85a6374" on branch "default"
319 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
319 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
320 transaction abort!
320 transaction abort!
321 rollback completed
321 rollback completed
322 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
322 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
323 no rollback information available
323 no rollback information available
324 0:6675d58eff77
324 0:6675d58eff77
325
325
326
326
327 fred is allowed inside foo/
327 fred is allowed inside foo/
328
328
329 $ echo 'foo/** = fred' >> $config
329 $ echo 'foo/** = fred' >> $config
330 $ do_push fred
330 $ do_push fred
331 Pushing as user fred
331 Pushing as user fred
332 hgrc = """
332 hgrc = """
333 [hooks]
333 [hooks]
334 pretxnchangegroup.acl = python:hgext.acl.hook
334 pretxnchangegroup.acl = python:hgext.acl.hook
335 [acl]
335 [acl]
336 sources = push
336 sources = push
337 [acl.allow]
337 [acl.allow]
338 foo/** = fred
338 foo/** = fred
339 """
339 """
340 pushing to ../b
340 pushing to ../b
341 query 1; heads
341 query 1; heads
342 searching for changes
342 searching for changes
343 all remote heads known locally
343 all remote heads known locally
344 invalid branchheads cache (served): tip differs
344 invalid branchheads cache (served): tip differs
345 listing keys for "bookmarks"
345 listing keys for "bookmarks"
346 3 changesets found
346 3 changesets found
347 list of changesets:
347 list of changesets:
348 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
348 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
349 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
349 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
350 911600dab2ae7a9baff75958b84fe606851ce955
350 911600dab2ae7a9baff75958b84fe606851ce955
351 adding changesets
351 adding changesets
352 bundling: 1/3 changesets (33.33%)
352 bundling: 1/3 changesets (33.33%)
353 bundling: 2/3 changesets (66.67%)
353 bundling: 2/3 changesets (66.67%)
354 bundling: 3/3 changesets (100.00%)
354 bundling: 3/3 changesets (100.00%)
355 bundling: 1/3 manifests (33.33%)
355 bundling: 1/3 manifests (33.33%)
356 bundling: 2/3 manifests (66.67%)
356 bundling: 2/3 manifests (66.67%)
357 bundling: 3/3 manifests (100.00%)
357 bundling: 3/3 manifests (100.00%)
358 bundling: foo/Bar/file.txt 1/3 files (33.33%)
358 bundling: foo/Bar/file.txt 1/3 files (33.33%)
359 bundling: foo/file.txt 2/3 files (66.67%)
359 bundling: foo/file.txt 2/3 files (66.67%)
360 bundling: quux/file.py 3/3 files (100.00%)
360 bundling: quux/file.py 3/3 files (100.00%)
361 changesets: 1 chunks
361 changesets: 1 chunks
362 add changeset ef1ea85a6374
362 add changeset ef1ea85a6374
363 changesets: 2 chunks
363 changesets: 2 chunks
364 add changeset f9cafe1212c8
364 add changeset f9cafe1212c8
365 changesets: 3 chunks
365 changesets: 3 chunks
366 add changeset 911600dab2ae
366 add changeset 911600dab2ae
367 adding manifests
367 adding manifests
368 manifests: 1/3 chunks (33.33%)
368 manifests: 1/3 chunks (33.33%)
369 manifests: 2/3 chunks (66.67%)
369 manifests: 2/3 chunks (66.67%)
370 manifests: 3/3 chunks (100.00%)
370 manifests: 3/3 chunks (100.00%)
371 adding file changes
371 adding file changes
372 adding foo/Bar/file.txt revisions
372 adding foo/Bar/file.txt revisions
373 files: 1/3 chunks (33.33%)
373 files: 1/3 chunks (33.33%)
374 adding foo/file.txt revisions
374 adding foo/file.txt revisions
375 files: 2/3 chunks (66.67%)
375 files: 2/3 chunks (66.67%)
376 adding quux/file.py revisions
376 adding quux/file.py revisions
377 files: 3/3 chunks (100.00%)
377 files: 3/3 chunks (100.00%)
378 added 3 changesets with 3 changes to 3 files
378 added 3 changesets with 3 changes to 3 files
379 calling hook pretxnchangegroup.acl: hgext.acl.hook
379 calling hook pretxnchangegroup.acl: hgext.acl.hook
380 acl: checking access for user "fred"
380 acl: checking access for user "fred"
381 acl: acl.allow.branches not enabled
381 acl: acl.allow.branches not enabled
382 acl: acl.deny.branches not enabled
382 acl: acl.deny.branches not enabled
383 acl: acl.allow enabled, 1 entries for user fred
383 acl: acl.allow enabled, 1 entries for user fred
384 acl: acl.deny not enabled
384 acl: acl.deny not enabled
385 acl: branch access granted: "ef1ea85a6374" on branch "default"
385 acl: branch access granted: "ef1ea85a6374" on branch "default"
386 acl: path access granted: "ef1ea85a6374"
386 acl: path access granted: "ef1ea85a6374"
387 acl: branch access granted: "f9cafe1212c8" on branch "default"
387 acl: branch access granted: "f9cafe1212c8" on branch "default"
388 acl: path access granted: "f9cafe1212c8"
388 acl: path access granted: "f9cafe1212c8"
389 acl: branch access granted: "911600dab2ae" on branch "default"
389 acl: branch access granted: "911600dab2ae" on branch "default"
390 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
390 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
391 transaction abort!
391 transaction abort!
392 rollback completed
392 rollback completed
393 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
393 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
394 no rollback information available
394 no rollback information available
395 0:6675d58eff77
395 0:6675d58eff77
396
396
397
397
398 Empty [acl.deny]
398 Empty [acl.deny]
399
399
400 $ echo '[acl.deny]' >> $config
400 $ echo '[acl.deny]' >> $config
401 $ do_push barney
401 $ do_push barney
402 Pushing as user barney
402 Pushing as user barney
403 hgrc = """
403 hgrc = """
404 [hooks]
404 [hooks]
405 pretxnchangegroup.acl = python:hgext.acl.hook
405 pretxnchangegroup.acl = python:hgext.acl.hook
406 [acl]
406 [acl]
407 sources = push
407 sources = push
408 [acl.allow]
408 [acl.allow]
409 foo/** = fred
409 foo/** = fred
410 [acl.deny]
410 [acl.deny]
411 """
411 """
412 pushing to ../b
412 pushing to ../b
413 query 1; heads
413 query 1; heads
414 searching for changes
414 searching for changes
415 all remote heads known locally
415 all remote heads known locally
416 invalid branchheads cache (served): tip differs
416 invalid branchheads cache (served): tip differs
417 listing keys for "bookmarks"
417 listing keys for "bookmarks"
418 3 changesets found
418 3 changesets found
419 list of changesets:
419 list of changesets:
420 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
420 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
421 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
421 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
422 911600dab2ae7a9baff75958b84fe606851ce955
422 911600dab2ae7a9baff75958b84fe606851ce955
423 adding changesets
423 adding changesets
424 bundling: 1/3 changesets (33.33%)
424 bundling: 1/3 changesets (33.33%)
425 bundling: 2/3 changesets (66.67%)
425 bundling: 2/3 changesets (66.67%)
426 bundling: 3/3 changesets (100.00%)
426 bundling: 3/3 changesets (100.00%)
427 bundling: 1/3 manifests (33.33%)
427 bundling: 1/3 manifests (33.33%)
428 bundling: 2/3 manifests (66.67%)
428 bundling: 2/3 manifests (66.67%)
429 bundling: 3/3 manifests (100.00%)
429 bundling: 3/3 manifests (100.00%)
430 bundling: foo/Bar/file.txt 1/3 files (33.33%)
430 bundling: foo/Bar/file.txt 1/3 files (33.33%)
431 bundling: foo/file.txt 2/3 files (66.67%)
431 bundling: foo/file.txt 2/3 files (66.67%)
432 bundling: quux/file.py 3/3 files (100.00%)
432 bundling: quux/file.py 3/3 files (100.00%)
433 changesets: 1 chunks
433 changesets: 1 chunks
434 add changeset ef1ea85a6374
434 add changeset ef1ea85a6374
435 changesets: 2 chunks
435 changesets: 2 chunks
436 add changeset f9cafe1212c8
436 add changeset f9cafe1212c8
437 changesets: 3 chunks
437 changesets: 3 chunks
438 add changeset 911600dab2ae
438 add changeset 911600dab2ae
439 adding manifests
439 adding manifests
440 manifests: 1/3 chunks (33.33%)
440 manifests: 1/3 chunks (33.33%)
441 manifests: 2/3 chunks (66.67%)
441 manifests: 2/3 chunks (66.67%)
442 manifests: 3/3 chunks (100.00%)
442 manifests: 3/3 chunks (100.00%)
443 adding file changes
443 adding file changes
444 adding foo/Bar/file.txt revisions
444 adding foo/Bar/file.txt revisions
445 files: 1/3 chunks (33.33%)
445 files: 1/3 chunks (33.33%)
446 adding foo/file.txt revisions
446 adding foo/file.txt revisions
447 files: 2/3 chunks (66.67%)
447 files: 2/3 chunks (66.67%)
448 adding quux/file.py revisions
448 adding quux/file.py revisions
449 files: 3/3 chunks (100.00%)
449 files: 3/3 chunks (100.00%)
450 added 3 changesets with 3 changes to 3 files
450 added 3 changesets with 3 changes to 3 files
451 calling hook pretxnchangegroup.acl: hgext.acl.hook
451 calling hook pretxnchangegroup.acl: hgext.acl.hook
452 acl: checking access for user "barney"
452 acl: checking access for user "barney"
453 acl: acl.allow.branches not enabled
453 acl: acl.allow.branches not enabled
454 acl: acl.deny.branches not enabled
454 acl: acl.deny.branches not enabled
455 acl: acl.allow enabled, 0 entries for user barney
455 acl: acl.allow enabled, 0 entries for user barney
456 acl: acl.deny enabled, 0 entries for user barney
456 acl: acl.deny enabled, 0 entries for user barney
457 acl: branch access granted: "ef1ea85a6374" on branch "default"
457 acl: branch access granted: "ef1ea85a6374" on branch "default"
458 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
458 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
459 transaction abort!
459 transaction abort!
460 rollback completed
460 rollback completed
461 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
461 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
462 no rollback information available
462 no rollback information available
463 0:6675d58eff77
463 0:6675d58eff77
464
464
465
465
466 fred is allowed inside foo/, but not foo/bar/ (case matters)
466 fred is allowed inside foo/, but not foo/bar/ (case matters)
467
467
468 $ echo 'foo/bar/** = fred' >> $config
468 $ echo 'foo/bar/** = fred' >> $config
469 $ do_push fred
469 $ do_push fred
470 Pushing as user fred
470 Pushing as user fred
471 hgrc = """
471 hgrc = """
472 [hooks]
472 [hooks]
473 pretxnchangegroup.acl = python:hgext.acl.hook
473 pretxnchangegroup.acl = python:hgext.acl.hook
474 [acl]
474 [acl]
475 sources = push
475 sources = push
476 [acl.allow]
476 [acl.allow]
477 foo/** = fred
477 foo/** = fred
478 [acl.deny]
478 [acl.deny]
479 foo/bar/** = fred
479 foo/bar/** = fred
480 """
480 """
481 pushing to ../b
481 pushing to ../b
482 query 1; heads
482 query 1; heads
483 searching for changes
483 searching for changes
484 all remote heads known locally
484 all remote heads known locally
485 invalid branchheads cache (served): tip differs
485 invalid branchheads cache (served): tip differs
486 listing keys for "bookmarks"
486 listing keys for "bookmarks"
487 3 changesets found
487 3 changesets found
488 list of changesets:
488 list of changesets:
489 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
489 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
490 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
490 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
491 911600dab2ae7a9baff75958b84fe606851ce955
491 911600dab2ae7a9baff75958b84fe606851ce955
492 adding changesets
492 adding changesets
493 bundling: 1/3 changesets (33.33%)
493 bundling: 1/3 changesets (33.33%)
494 bundling: 2/3 changesets (66.67%)
494 bundling: 2/3 changesets (66.67%)
495 bundling: 3/3 changesets (100.00%)
495 bundling: 3/3 changesets (100.00%)
496 bundling: 1/3 manifests (33.33%)
496 bundling: 1/3 manifests (33.33%)
497 bundling: 2/3 manifests (66.67%)
497 bundling: 2/3 manifests (66.67%)
498 bundling: 3/3 manifests (100.00%)
498 bundling: 3/3 manifests (100.00%)
499 bundling: foo/Bar/file.txt 1/3 files (33.33%)
499 bundling: foo/Bar/file.txt 1/3 files (33.33%)
500 bundling: foo/file.txt 2/3 files (66.67%)
500 bundling: foo/file.txt 2/3 files (66.67%)
501 bundling: quux/file.py 3/3 files (100.00%)
501 bundling: quux/file.py 3/3 files (100.00%)
502 changesets: 1 chunks
502 changesets: 1 chunks
503 add changeset ef1ea85a6374
503 add changeset ef1ea85a6374
504 changesets: 2 chunks
504 changesets: 2 chunks
505 add changeset f9cafe1212c8
505 add changeset f9cafe1212c8
506 changesets: 3 chunks
506 changesets: 3 chunks
507 add changeset 911600dab2ae
507 add changeset 911600dab2ae
508 adding manifests
508 adding manifests
509 manifests: 1/3 chunks (33.33%)
509 manifests: 1/3 chunks (33.33%)
510 manifests: 2/3 chunks (66.67%)
510 manifests: 2/3 chunks (66.67%)
511 manifests: 3/3 chunks (100.00%)
511 manifests: 3/3 chunks (100.00%)
512 adding file changes
512 adding file changes
513 adding foo/Bar/file.txt revisions
513 adding foo/Bar/file.txt revisions
514 files: 1/3 chunks (33.33%)
514 files: 1/3 chunks (33.33%)
515 adding foo/file.txt revisions
515 adding foo/file.txt revisions
516 files: 2/3 chunks (66.67%)
516 files: 2/3 chunks (66.67%)
517 adding quux/file.py revisions
517 adding quux/file.py revisions
518 files: 3/3 chunks (100.00%)
518 files: 3/3 chunks (100.00%)
519 added 3 changesets with 3 changes to 3 files
519 added 3 changesets with 3 changes to 3 files
520 calling hook pretxnchangegroup.acl: hgext.acl.hook
520 calling hook pretxnchangegroup.acl: hgext.acl.hook
521 acl: checking access for user "fred"
521 acl: checking access for user "fred"
522 acl: acl.allow.branches not enabled
522 acl: acl.allow.branches not enabled
523 acl: acl.deny.branches not enabled
523 acl: acl.deny.branches not enabled
524 acl: acl.allow enabled, 1 entries for user fred
524 acl: acl.allow enabled, 1 entries for user fred
525 acl: acl.deny enabled, 1 entries for user fred
525 acl: acl.deny enabled, 1 entries for user fred
526 acl: branch access granted: "ef1ea85a6374" on branch "default"
526 acl: branch access granted: "ef1ea85a6374" on branch "default"
527 acl: path access granted: "ef1ea85a6374"
527 acl: path access granted: "ef1ea85a6374"
528 acl: branch access granted: "f9cafe1212c8" on branch "default"
528 acl: branch access granted: "f9cafe1212c8" on branch "default"
529 acl: path access granted: "f9cafe1212c8"
529 acl: path access granted: "f9cafe1212c8"
530 acl: branch access granted: "911600dab2ae" on branch "default"
530 acl: branch access granted: "911600dab2ae" on branch "default"
531 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
531 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
532 transaction abort!
532 transaction abort!
533 rollback completed
533 rollback completed
534 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
534 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
535 no rollback information available
535 no rollback information available
536 0:6675d58eff77
536 0:6675d58eff77
537
537
538
538
539 fred is allowed inside foo/, but not foo/Bar/
539 fred is allowed inside foo/, but not foo/Bar/
540
540
541 $ echo 'foo/Bar/** = fred' >> $config
541 $ echo 'foo/Bar/** = fred' >> $config
542 $ do_push fred
542 $ do_push fred
543 Pushing as user fred
543 Pushing as user fred
544 hgrc = """
544 hgrc = """
545 [hooks]
545 [hooks]
546 pretxnchangegroup.acl = python:hgext.acl.hook
546 pretxnchangegroup.acl = python:hgext.acl.hook
547 [acl]
547 [acl]
548 sources = push
548 sources = push
549 [acl.allow]
549 [acl.allow]
550 foo/** = fred
550 foo/** = fred
551 [acl.deny]
551 [acl.deny]
552 foo/bar/** = fred
552 foo/bar/** = fred
553 foo/Bar/** = fred
553 foo/Bar/** = fred
554 """
554 """
555 pushing to ../b
555 pushing to ../b
556 query 1; heads
556 query 1; heads
557 searching for changes
557 searching for changes
558 all remote heads known locally
558 all remote heads known locally
559 invalid branchheads cache (served): tip differs
559 invalid branchheads cache (served): tip differs
560 listing keys for "bookmarks"
560 listing keys for "bookmarks"
561 3 changesets found
561 3 changesets found
562 list of changesets:
562 list of changesets:
563 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
563 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
564 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
564 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
565 911600dab2ae7a9baff75958b84fe606851ce955
565 911600dab2ae7a9baff75958b84fe606851ce955
566 adding changesets
566 adding changesets
567 bundling: 1/3 changesets (33.33%)
567 bundling: 1/3 changesets (33.33%)
568 bundling: 2/3 changesets (66.67%)
568 bundling: 2/3 changesets (66.67%)
569 bundling: 3/3 changesets (100.00%)
569 bundling: 3/3 changesets (100.00%)
570 bundling: 1/3 manifests (33.33%)
570 bundling: 1/3 manifests (33.33%)
571 bundling: 2/3 manifests (66.67%)
571 bundling: 2/3 manifests (66.67%)
572 bundling: 3/3 manifests (100.00%)
572 bundling: 3/3 manifests (100.00%)
573 bundling: foo/Bar/file.txt 1/3 files (33.33%)
573 bundling: foo/Bar/file.txt 1/3 files (33.33%)
574 bundling: foo/file.txt 2/3 files (66.67%)
574 bundling: foo/file.txt 2/3 files (66.67%)
575 bundling: quux/file.py 3/3 files (100.00%)
575 bundling: quux/file.py 3/3 files (100.00%)
576 changesets: 1 chunks
576 changesets: 1 chunks
577 add changeset ef1ea85a6374
577 add changeset ef1ea85a6374
578 changesets: 2 chunks
578 changesets: 2 chunks
579 add changeset f9cafe1212c8
579 add changeset f9cafe1212c8
580 changesets: 3 chunks
580 changesets: 3 chunks
581 add changeset 911600dab2ae
581 add changeset 911600dab2ae
582 adding manifests
582 adding manifests
583 manifests: 1/3 chunks (33.33%)
583 manifests: 1/3 chunks (33.33%)
584 manifests: 2/3 chunks (66.67%)
584 manifests: 2/3 chunks (66.67%)
585 manifests: 3/3 chunks (100.00%)
585 manifests: 3/3 chunks (100.00%)
586 adding file changes
586 adding file changes
587 adding foo/Bar/file.txt revisions
587 adding foo/Bar/file.txt revisions
588 files: 1/3 chunks (33.33%)
588 files: 1/3 chunks (33.33%)
589 adding foo/file.txt revisions
589 adding foo/file.txt revisions
590 files: 2/3 chunks (66.67%)
590 files: 2/3 chunks (66.67%)
591 adding quux/file.py revisions
591 adding quux/file.py revisions
592 files: 3/3 chunks (100.00%)
592 files: 3/3 chunks (100.00%)
593 added 3 changesets with 3 changes to 3 files
593 added 3 changesets with 3 changes to 3 files
594 calling hook pretxnchangegroup.acl: hgext.acl.hook
594 calling hook pretxnchangegroup.acl: hgext.acl.hook
595 acl: checking access for user "fred"
595 acl: checking access for user "fred"
596 acl: acl.allow.branches not enabled
596 acl: acl.allow.branches not enabled
597 acl: acl.deny.branches not enabled
597 acl: acl.deny.branches not enabled
598 acl: acl.allow enabled, 1 entries for user fred
598 acl: acl.allow enabled, 1 entries for user fred
599 acl: acl.deny enabled, 2 entries for user fred
599 acl: acl.deny enabled, 2 entries for user fred
600 acl: branch access granted: "ef1ea85a6374" on branch "default"
600 acl: branch access granted: "ef1ea85a6374" on branch "default"
601 acl: path access granted: "ef1ea85a6374"
601 acl: path access granted: "ef1ea85a6374"
602 acl: branch access granted: "f9cafe1212c8" on branch "default"
602 acl: branch access granted: "f9cafe1212c8" on branch "default"
603 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
603 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
604 transaction abort!
604 transaction abort!
605 rollback completed
605 rollback completed
606 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
606 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
607 no rollback information available
607 no rollback information available
608 0:6675d58eff77
608 0:6675d58eff77
609
609
610
610
611 $ echo 'barney is not mentioned => not allowed anywhere'
611 $ echo 'barney is not mentioned => not allowed anywhere'
612 barney is not mentioned => not allowed anywhere
612 barney is not mentioned => not allowed anywhere
613 $ do_push barney
613 $ do_push barney
614 Pushing as user barney
614 Pushing as user barney
615 hgrc = """
615 hgrc = """
616 [hooks]
616 [hooks]
617 pretxnchangegroup.acl = python:hgext.acl.hook
617 pretxnchangegroup.acl = python:hgext.acl.hook
618 [acl]
618 [acl]
619 sources = push
619 sources = push
620 [acl.allow]
620 [acl.allow]
621 foo/** = fred
621 foo/** = fred
622 [acl.deny]
622 [acl.deny]
623 foo/bar/** = fred
623 foo/bar/** = fred
624 foo/Bar/** = fred
624 foo/Bar/** = fred
625 """
625 """
626 pushing to ../b
626 pushing to ../b
627 query 1; heads
627 query 1; heads
628 searching for changes
628 searching for changes
629 all remote heads known locally
629 all remote heads known locally
630 invalid branchheads cache (served): tip differs
630 invalid branchheads cache (served): tip differs
631 listing keys for "bookmarks"
631 listing keys for "bookmarks"
632 3 changesets found
632 3 changesets found
633 list of changesets:
633 list of changesets:
634 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
634 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
635 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
635 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
636 911600dab2ae7a9baff75958b84fe606851ce955
636 911600dab2ae7a9baff75958b84fe606851ce955
637 adding changesets
637 adding changesets
638 bundling: 1/3 changesets (33.33%)
638 bundling: 1/3 changesets (33.33%)
639 bundling: 2/3 changesets (66.67%)
639 bundling: 2/3 changesets (66.67%)
640 bundling: 3/3 changesets (100.00%)
640 bundling: 3/3 changesets (100.00%)
641 bundling: 1/3 manifests (33.33%)
641 bundling: 1/3 manifests (33.33%)
642 bundling: 2/3 manifests (66.67%)
642 bundling: 2/3 manifests (66.67%)
643 bundling: 3/3 manifests (100.00%)
643 bundling: 3/3 manifests (100.00%)
644 bundling: foo/Bar/file.txt 1/3 files (33.33%)
644 bundling: foo/Bar/file.txt 1/3 files (33.33%)
645 bundling: foo/file.txt 2/3 files (66.67%)
645 bundling: foo/file.txt 2/3 files (66.67%)
646 bundling: quux/file.py 3/3 files (100.00%)
646 bundling: quux/file.py 3/3 files (100.00%)
647 changesets: 1 chunks
647 changesets: 1 chunks
648 add changeset ef1ea85a6374
648 add changeset ef1ea85a6374
649 changesets: 2 chunks
649 changesets: 2 chunks
650 add changeset f9cafe1212c8
650 add changeset f9cafe1212c8
651 changesets: 3 chunks
651 changesets: 3 chunks
652 add changeset 911600dab2ae
652 add changeset 911600dab2ae
653 adding manifests
653 adding manifests
654 manifests: 1/3 chunks (33.33%)
654 manifests: 1/3 chunks (33.33%)
655 manifests: 2/3 chunks (66.67%)
655 manifests: 2/3 chunks (66.67%)
656 manifests: 3/3 chunks (100.00%)
656 manifests: 3/3 chunks (100.00%)
657 adding file changes
657 adding file changes
658 adding foo/Bar/file.txt revisions
658 adding foo/Bar/file.txt revisions
659 files: 1/3 chunks (33.33%)
659 files: 1/3 chunks (33.33%)
660 adding foo/file.txt revisions
660 adding foo/file.txt revisions
661 files: 2/3 chunks (66.67%)
661 files: 2/3 chunks (66.67%)
662 adding quux/file.py revisions
662 adding quux/file.py revisions
663 files: 3/3 chunks (100.00%)
663 files: 3/3 chunks (100.00%)
664 added 3 changesets with 3 changes to 3 files
664 added 3 changesets with 3 changes to 3 files
665 calling hook pretxnchangegroup.acl: hgext.acl.hook
665 calling hook pretxnchangegroup.acl: hgext.acl.hook
666 acl: checking access for user "barney"
666 acl: checking access for user "barney"
667 acl: acl.allow.branches not enabled
667 acl: acl.allow.branches not enabled
668 acl: acl.deny.branches not enabled
668 acl: acl.deny.branches not enabled
669 acl: acl.allow enabled, 0 entries for user barney
669 acl: acl.allow enabled, 0 entries for user barney
670 acl: acl.deny enabled, 0 entries for user barney
670 acl: acl.deny enabled, 0 entries for user barney
671 acl: branch access granted: "ef1ea85a6374" on branch "default"
671 acl: branch access granted: "ef1ea85a6374" on branch "default"
672 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
672 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
673 transaction abort!
673 transaction abort!
674 rollback completed
674 rollback completed
675 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
675 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
676 no rollback information available
676 no rollback information available
677 0:6675d58eff77
677 0:6675d58eff77
678
678
679
679
680 barney is allowed everywhere
680 barney is allowed everywhere
681
681
682 $ echo '[acl.allow]' >> $config
682 $ echo '[acl.allow]' >> $config
683 $ echo '** = barney' >> $config
683 $ echo '** = barney' >> $config
684 $ do_push barney
684 $ do_push barney
685 Pushing as user barney
685 Pushing as user barney
686 hgrc = """
686 hgrc = """
687 [hooks]
687 [hooks]
688 pretxnchangegroup.acl = python:hgext.acl.hook
688 pretxnchangegroup.acl = python:hgext.acl.hook
689 [acl]
689 [acl]
690 sources = push
690 sources = push
691 [acl.allow]
691 [acl.allow]
692 foo/** = fred
692 foo/** = fred
693 [acl.deny]
693 [acl.deny]
694 foo/bar/** = fred
694 foo/bar/** = fred
695 foo/Bar/** = fred
695 foo/Bar/** = fred
696 [acl.allow]
696 [acl.allow]
697 ** = barney
697 ** = barney
698 """
698 """
699 pushing to ../b
699 pushing to ../b
700 query 1; heads
700 query 1; heads
701 searching for changes
701 searching for changes
702 all remote heads known locally
702 all remote heads known locally
703 invalid branchheads cache (served): tip differs
703 invalid branchheads cache (served): tip differs
704 listing keys for "bookmarks"
704 listing keys for "bookmarks"
705 3 changesets found
705 3 changesets found
706 list of changesets:
706 list of changesets:
707 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
707 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
708 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
708 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
709 911600dab2ae7a9baff75958b84fe606851ce955
709 911600dab2ae7a9baff75958b84fe606851ce955
710 adding changesets
710 adding changesets
711 bundling: 1/3 changesets (33.33%)
711 bundling: 1/3 changesets (33.33%)
712 bundling: 2/3 changesets (66.67%)
712 bundling: 2/3 changesets (66.67%)
713 bundling: 3/3 changesets (100.00%)
713 bundling: 3/3 changesets (100.00%)
714 bundling: 1/3 manifests (33.33%)
714 bundling: 1/3 manifests (33.33%)
715 bundling: 2/3 manifests (66.67%)
715 bundling: 2/3 manifests (66.67%)
716 bundling: 3/3 manifests (100.00%)
716 bundling: 3/3 manifests (100.00%)
717 bundling: foo/Bar/file.txt 1/3 files (33.33%)
717 bundling: foo/Bar/file.txt 1/3 files (33.33%)
718 bundling: foo/file.txt 2/3 files (66.67%)
718 bundling: foo/file.txt 2/3 files (66.67%)
719 bundling: quux/file.py 3/3 files (100.00%)
719 bundling: quux/file.py 3/3 files (100.00%)
720 changesets: 1 chunks
720 changesets: 1 chunks
721 add changeset ef1ea85a6374
721 add changeset ef1ea85a6374
722 changesets: 2 chunks
722 changesets: 2 chunks
723 add changeset f9cafe1212c8
723 add changeset f9cafe1212c8
724 changesets: 3 chunks
724 changesets: 3 chunks
725 add changeset 911600dab2ae
725 add changeset 911600dab2ae
726 adding manifests
726 adding manifests
727 manifests: 1/3 chunks (33.33%)
727 manifests: 1/3 chunks (33.33%)
728 manifests: 2/3 chunks (66.67%)
728 manifests: 2/3 chunks (66.67%)
729 manifests: 3/3 chunks (100.00%)
729 manifests: 3/3 chunks (100.00%)
730 adding file changes
730 adding file changes
731 adding foo/Bar/file.txt revisions
731 adding foo/Bar/file.txt revisions
732 files: 1/3 chunks (33.33%)
732 files: 1/3 chunks (33.33%)
733 adding foo/file.txt revisions
733 adding foo/file.txt revisions
734 files: 2/3 chunks (66.67%)
734 files: 2/3 chunks (66.67%)
735 adding quux/file.py revisions
735 adding quux/file.py revisions
736 files: 3/3 chunks (100.00%)
736 files: 3/3 chunks (100.00%)
737 added 3 changesets with 3 changes to 3 files
737 added 3 changesets with 3 changes to 3 files
738 calling hook pretxnchangegroup.acl: hgext.acl.hook
738 calling hook pretxnchangegroup.acl: hgext.acl.hook
739 acl: checking access for user "barney"
739 acl: checking access for user "barney"
740 acl: acl.allow.branches not enabled
740 acl: acl.allow.branches not enabled
741 acl: acl.deny.branches not enabled
741 acl: acl.deny.branches not enabled
742 acl: acl.allow enabled, 1 entries for user barney
742 acl: acl.allow enabled, 1 entries for user barney
743 acl: acl.deny enabled, 0 entries for user barney
743 acl: acl.deny enabled, 0 entries for user barney
744 acl: branch access granted: "ef1ea85a6374" on branch "default"
744 acl: branch access granted: "ef1ea85a6374" on branch "default"
745 acl: path access granted: "ef1ea85a6374"
745 acl: path access granted: "ef1ea85a6374"
746 acl: branch access granted: "f9cafe1212c8" on branch "default"
746 acl: branch access granted: "f9cafe1212c8" on branch "default"
747 acl: path access granted: "f9cafe1212c8"
747 acl: path access granted: "f9cafe1212c8"
748 acl: branch access granted: "911600dab2ae" on branch "default"
748 acl: branch access granted: "911600dab2ae" on branch "default"
749 acl: path access granted: "911600dab2ae"
749 acl: path access granted: "911600dab2ae"
750 updating the branch cache
750 listing keys for "phases"
751 listing keys for "phases"
751 try to push obsolete markers to remote
752 try to push obsolete markers to remote
752 updating the branch cache
753 checking for updated bookmarks
753 checking for updated bookmarks
754 listing keys for "bookmarks"
754 listing keys for "bookmarks"
755 repository tip rolled back to revision 0 (undo push)
755 repository tip rolled back to revision 0 (undo push)
756 0:6675d58eff77
756 0:6675d58eff77
757
757
758
758
759 wilma can change files with a .txt extension
759 wilma can change files with a .txt extension
760
760
761 $ echo '**/*.txt = wilma' >> $config
761 $ echo '**/*.txt = wilma' >> $config
762 $ do_push wilma
762 $ do_push wilma
763 Pushing as user wilma
763 Pushing as user wilma
764 hgrc = """
764 hgrc = """
765 [hooks]
765 [hooks]
766 pretxnchangegroup.acl = python:hgext.acl.hook
766 pretxnchangegroup.acl = python:hgext.acl.hook
767 [acl]
767 [acl]
768 sources = push
768 sources = push
769 [acl.allow]
769 [acl.allow]
770 foo/** = fred
770 foo/** = fred
771 [acl.deny]
771 [acl.deny]
772 foo/bar/** = fred
772 foo/bar/** = fred
773 foo/Bar/** = fred
773 foo/Bar/** = fred
774 [acl.allow]
774 [acl.allow]
775 ** = barney
775 ** = barney
776 **/*.txt = wilma
776 **/*.txt = wilma
777 """
777 """
778 pushing to ../b
778 pushing to ../b
779 query 1; heads
779 query 1; heads
780 searching for changes
780 searching for changes
781 all remote heads known locally
781 all remote heads known locally
782 invalid branchheads cache (served): tip differs
782 invalid branchheads cache (served): tip differs
783 listing keys for "bookmarks"
783 listing keys for "bookmarks"
784 3 changesets found
784 3 changesets found
785 list of changesets:
785 list of changesets:
786 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
786 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
787 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
787 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
788 911600dab2ae7a9baff75958b84fe606851ce955
788 911600dab2ae7a9baff75958b84fe606851ce955
789 adding changesets
789 adding changesets
790 bundling: 1/3 changesets (33.33%)
790 bundling: 1/3 changesets (33.33%)
791 bundling: 2/3 changesets (66.67%)
791 bundling: 2/3 changesets (66.67%)
792 bundling: 3/3 changesets (100.00%)
792 bundling: 3/3 changesets (100.00%)
793 bundling: 1/3 manifests (33.33%)
793 bundling: 1/3 manifests (33.33%)
794 bundling: 2/3 manifests (66.67%)
794 bundling: 2/3 manifests (66.67%)
795 bundling: 3/3 manifests (100.00%)
795 bundling: 3/3 manifests (100.00%)
796 bundling: foo/Bar/file.txt 1/3 files (33.33%)
796 bundling: foo/Bar/file.txt 1/3 files (33.33%)
797 bundling: foo/file.txt 2/3 files (66.67%)
797 bundling: foo/file.txt 2/3 files (66.67%)
798 bundling: quux/file.py 3/3 files (100.00%)
798 bundling: quux/file.py 3/3 files (100.00%)
799 changesets: 1 chunks
799 changesets: 1 chunks
800 add changeset ef1ea85a6374
800 add changeset ef1ea85a6374
801 changesets: 2 chunks
801 changesets: 2 chunks
802 add changeset f9cafe1212c8
802 add changeset f9cafe1212c8
803 changesets: 3 chunks
803 changesets: 3 chunks
804 add changeset 911600dab2ae
804 add changeset 911600dab2ae
805 adding manifests
805 adding manifests
806 manifests: 1/3 chunks (33.33%)
806 manifests: 1/3 chunks (33.33%)
807 manifests: 2/3 chunks (66.67%)
807 manifests: 2/3 chunks (66.67%)
808 manifests: 3/3 chunks (100.00%)
808 manifests: 3/3 chunks (100.00%)
809 adding file changes
809 adding file changes
810 adding foo/Bar/file.txt revisions
810 adding foo/Bar/file.txt revisions
811 files: 1/3 chunks (33.33%)
811 files: 1/3 chunks (33.33%)
812 adding foo/file.txt revisions
812 adding foo/file.txt revisions
813 files: 2/3 chunks (66.67%)
813 files: 2/3 chunks (66.67%)
814 adding quux/file.py revisions
814 adding quux/file.py revisions
815 files: 3/3 chunks (100.00%)
815 files: 3/3 chunks (100.00%)
816 added 3 changesets with 3 changes to 3 files
816 added 3 changesets with 3 changes to 3 files
817 calling hook pretxnchangegroup.acl: hgext.acl.hook
817 calling hook pretxnchangegroup.acl: hgext.acl.hook
818 acl: checking access for user "wilma"
818 acl: checking access for user "wilma"
819 acl: acl.allow.branches not enabled
819 acl: acl.allow.branches not enabled
820 acl: acl.deny.branches not enabled
820 acl: acl.deny.branches not enabled
821 acl: acl.allow enabled, 1 entries for user wilma
821 acl: acl.allow enabled, 1 entries for user wilma
822 acl: acl.deny enabled, 0 entries for user wilma
822 acl: acl.deny enabled, 0 entries for user wilma
823 acl: branch access granted: "ef1ea85a6374" on branch "default"
823 acl: branch access granted: "ef1ea85a6374" on branch "default"
824 acl: path access granted: "ef1ea85a6374"
824 acl: path access granted: "ef1ea85a6374"
825 acl: branch access granted: "f9cafe1212c8" on branch "default"
825 acl: branch access granted: "f9cafe1212c8" on branch "default"
826 acl: path access granted: "f9cafe1212c8"
826 acl: path access granted: "f9cafe1212c8"
827 acl: branch access granted: "911600dab2ae" on branch "default"
827 acl: branch access granted: "911600dab2ae" on branch "default"
828 error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
828 error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
829 transaction abort!
829 transaction abort!
830 rollback completed
830 rollback completed
831 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
831 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
832 no rollback information available
832 no rollback information available
833 0:6675d58eff77
833 0:6675d58eff77
834
834
835
835
836 file specified by acl.config does not exist
836 file specified by acl.config does not exist
837
837
838 $ echo '[acl]' >> $config
838 $ echo '[acl]' >> $config
839 $ echo 'config = ../acl.config' >> $config
839 $ echo 'config = ../acl.config' >> $config
840 $ do_push barney
840 $ do_push barney
841 Pushing as user barney
841 Pushing as user barney
842 hgrc = """
842 hgrc = """
843 [hooks]
843 [hooks]
844 pretxnchangegroup.acl = python:hgext.acl.hook
844 pretxnchangegroup.acl = python:hgext.acl.hook
845 [acl]
845 [acl]
846 sources = push
846 sources = push
847 [acl.allow]
847 [acl.allow]
848 foo/** = fred
848 foo/** = fred
849 [acl.deny]
849 [acl.deny]
850 foo/bar/** = fred
850 foo/bar/** = fred
851 foo/Bar/** = fred
851 foo/Bar/** = fred
852 [acl.allow]
852 [acl.allow]
853 ** = barney
853 ** = barney
854 **/*.txt = wilma
854 **/*.txt = wilma
855 [acl]
855 [acl]
856 config = ../acl.config
856 config = ../acl.config
857 """
857 """
858 pushing to ../b
858 pushing to ../b
859 query 1; heads
859 query 1; heads
860 searching for changes
860 searching for changes
861 all remote heads known locally
861 all remote heads known locally
862 invalid branchheads cache (served): tip differs
862 invalid branchheads cache (served): tip differs
863 listing keys for "bookmarks"
863 listing keys for "bookmarks"
864 3 changesets found
864 3 changesets found
865 list of changesets:
865 list of changesets:
866 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
866 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
867 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
867 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
868 911600dab2ae7a9baff75958b84fe606851ce955
868 911600dab2ae7a9baff75958b84fe606851ce955
869 adding changesets
869 adding changesets
870 bundling: 1/3 changesets (33.33%)
870 bundling: 1/3 changesets (33.33%)
871 bundling: 2/3 changesets (66.67%)
871 bundling: 2/3 changesets (66.67%)
872 bundling: 3/3 changesets (100.00%)
872 bundling: 3/3 changesets (100.00%)
873 bundling: 1/3 manifests (33.33%)
873 bundling: 1/3 manifests (33.33%)
874 bundling: 2/3 manifests (66.67%)
874 bundling: 2/3 manifests (66.67%)
875 bundling: 3/3 manifests (100.00%)
875 bundling: 3/3 manifests (100.00%)
876 bundling: foo/Bar/file.txt 1/3 files (33.33%)
876 bundling: foo/Bar/file.txt 1/3 files (33.33%)
877 bundling: foo/file.txt 2/3 files (66.67%)
877 bundling: foo/file.txt 2/3 files (66.67%)
878 bundling: quux/file.py 3/3 files (100.00%)
878 bundling: quux/file.py 3/3 files (100.00%)
879 changesets: 1 chunks
879 changesets: 1 chunks
880 add changeset ef1ea85a6374
880 add changeset ef1ea85a6374
881 changesets: 2 chunks
881 changesets: 2 chunks
882 add changeset f9cafe1212c8
882 add changeset f9cafe1212c8
883 changesets: 3 chunks
883 changesets: 3 chunks
884 add changeset 911600dab2ae
884 add changeset 911600dab2ae
885 adding manifests
885 adding manifests
886 manifests: 1/3 chunks (33.33%)
886 manifests: 1/3 chunks (33.33%)
887 manifests: 2/3 chunks (66.67%)
887 manifests: 2/3 chunks (66.67%)
888 manifests: 3/3 chunks (100.00%)
888 manifests: 3/3 chunks (100.00%)
889 adding file changes
889 adding file changes
890 adding foo/Bar/file.txt revisions
890 adding foo/Bar/file.txt revisions
891 files: 1/3 chunks (33.33%)
891 files: 1/3 chunks (33.33%)
892 adding foo/file.txt revisions
892 adding foo/file.txt revisions
893 files: 2/3 chunks (66.67%)
893 files: 2/3 chunks (66.67%)
894 adding quux/file.py revisions
894 adding quux/file.py revisions
895 files: 3/3 chunks (100.00%)
895 files: 3/3 chunks (100.00%)
896 added 3 changesets with 3 changes to 3 files
896 added 3 changesets with 3 changes to 3 files
897 calling hook pretxnchangegroup.acl: hgext.acl.hook
897 calling hook pretxnchangegroup.acl: hgext.acl.hook
898 acl: checking access for user "barney"
898 acl: checking access for user "barney"
899 error: pretxnchangegroup.acl hook raised an exception: [Errno *] *: '../acl.config' (glob)
899 error: pretxnchangegroup.acl hook raised an exception: [Errno *] *: '../acl.config' (glob)
900 transaction abort!
900 transaction abort!
901 rollback completed
901 rollback completed
902 abort: *: ../acl.config (glob)
902 abort: *: ../acl.config (glob)
903 no rollback information available
903 no rollback information available
904 0:6675d58eff77
904 0:6675d58eff77
905
905
906
906
907 betty is allowed inside foo/ by an acl.config file
907 betty is allowed inside foo/ by an acl.config file
908
908
909 $ echo '[acl.allow]' >> acl.config
909 $ echo '[acl.allow]' >> acl.config
910 $ echo 'foo/** = betty' >> acl.config
910 $ echo 'foo/** = betty' >> acl.config
911 $ do_push betty
911 $ do_push betty
912 Pushing as user betty
912 Pushing as user betty
913 hgrc = """
913 hgrc = """
914 [hooks]
914 [hooks]
915 pretxnchangegroup.acl = python:hgext.acl.hook
915 pretxnchangegroup.acl = python:hgext.acl.hook
916 [acl]
916 [acl]
917 sources = push
917 sources = push
918 [acl.allow]
918 [acl.allow]
919 foo/** = fred
919 foo/** = fred
920 [acl.deny]
920 [acl.deny]
921 foo/bar/** = fred
921 foo/bar/** = fred
922 foo/Bar/** = fred
922 foo/Bar/** = fred
923 [acl.allow]
923 [acl.allow]
924 ** = barney
924 ** = barney
925 **/*.txt = wilma
925 **/*.txt = wilma
926 [acl]
926 [acl]
927 config = ../acl.config
927 config = ../acl.config
928 """
928 """
929 acl.config = """
929 acl.config = """
930 [acl.allow]
930 [acl.allow]
931 foo/** = betty
931 foo/** = betty
932 """
932 """
933 pushing to ../b
933 pushing to ../b
934 query 1; heads
934 query 1; heads
935 searching for changes
935 searching for changes
936 all remote heads known locally
936 all remote heads known locally
937 invalid branchheads cache (served): tip differs
937 invalid branchheads cache (served): tip differs
938 listing keys for "bookmarks"
938 listing keys for "bookmarks"
939 3 changesets found
939 3 changesets found
940 list of changesets:
940 list of changesets:
941 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
941 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
942 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
942 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
943 911600dab2ae7a9baff75958b84fe606851ce955
943 911600dab2ae7a9baff75958b84fe606851ce955
944 adding changesets
944 adding changesets
945 bundling: 1/3 changesets (33.33%)
945 bundling: 1/3 changesets (33.33%)
946 bundling: 2/3 changesets (66.67%)
946 bundling: 2/3 changesets (66.67%)
947 bundling: 3/3 changesets (100.00%)
947 bundling: 3/3 changesets (100.00%)
948 bundling: 1/3 manifests (33.33%)
948 bundling: 1/3 manifests (33.33%)
949 bundling: 2/3 manifests (66.67%)
949 bundling: 2/3 manifests (66.67%)
950 bundling: 3/3 manifests (100.00%)
950 bundling: 3/3 manifests (100.00%)
951 bundling: foo/Bar/file.txt 1/3 files (33.33%)
951 bundling: foo/Bar/file.txt 1/3 files (33.33%)
952 bundling: foo/file.txt 2/3 files (66.67%)
952 bundling: foo/file.txt 2/3 files (66.67%)
953 bundling: quux/file.py 3/3 files (100.00%)
953 bundling: quux/file.py 3/3 files (100.00%)
954 changesets: 1 chunks
954 changesets: 1 chunks
955 add changeset ef1ea85a6374
955 add changeset ef1ea85a6374
956 changesets: 2 chunks
956 changesets: 2 chunks
957 add changeset f9cafe1212c8
957 add changeset f9cafe1212c8
958 changesets: 3 chunks
958 changesets: 3 chunks
959 add changeset 911600dab2ae
959 add changeset 911600dab2ae
960 adding manifests
960 adding manifests
961 manifests: 1/3 chunks (33.33%)
961 manifests: 1/3 chunks (33.33%)
962 manifests: 2/3 chunks (66.67%)
962 manifests: 2/3 chunks (66.67%)
963 manifests: 3/3 chunks (100.00%)
963 manifests: 3/3 chunks (100.00%)
964 adding file changes
964 adding file changes
965 adding foo/Bar/file.txt revisions
965 adding foo/Bar/file.txt revisions
966 files: 1/3 chunks (33.33%)
966 files: 1/3 chunks (33.33%)
967 adding foo/file.txt revisions
967 adding foo/file.txt revisions
968 files: 2/3 chunks (66.67%)
968 files: 2/3 chunks (66.67%)
969 adding quux/file.py revisions
969 adding quux/file.py revisions
970 files: 3/3 chunks (100.00%)
970 files: 3/3 chunks (100.00%)
971 added 3 changesets with 3 changes to 3 files
971 added 3 changesets with 3 changes to 3 files
972 calling hook pretxnchangegroup.acl: hgext.acl.hook
972 calling hook pretxnchangegroup.acl: hgext.acl.hook
973 acl: checking access for user "betty"
973 acl: checking access for user "betty"
974 acl: acl.allow.branches not enabled
974 acl: acl.allow.branches not enabled
975 acl: acl.deny.branches not enabled
975 acl: acl.deny.branches not enabled
976 acl: acl.allow enabled, 1 entries for user betty
976 acl: acl.allow enabled, 1 entries for user betty
977 acl: acl.deny enabled, 0 entries for user betty
977 acl: acl.deny enabled, 0 entries for user betty
978 acl: branch access granted: "ef1ea85a6374" on branch "default"
978 acl: branch access granted: "ef1ea85a6374" on branch "default"
979 acl: path access granted: "ef1ea85a6374"
979 acl: path access granted: "ef1ea85a6374"
980 acl: branch access granted: "f9cafe1212c8" on branch "default"
980 acl: branch access granted: "f9cafe1212c8" on branch "default"
981 acl: path access granted: "f9cafe1212c8"
981 acl: path access granted: "f9cafe1212c8"
982 acl: branch access granted: "911600dab2ae" on branch "default"
982 acl: branch access granted: "911600dab2ae" on branch "default"
983 error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
983 error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
984 transaction abort!
984 transaction abort!
985 rollback completed
985 rollback completed
986 abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
986 abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
987 no rollback information available
987 no rollback information available
988 0:6675d58eff77
988 0:6675d58eff77
989
989
990
990
991 acl.config can set only [acl.allow]/[acl.deny]
991 acl.config can set only [acl.allow]/[acl.deny]
992
992
993 $ echo '[hooks]' >> acl.config
993 $ echo '[hooks]' >> acl.config
994 $ echo 'changegroup.acl = false' >> acl.config
994 $ echo 'changegroup.acl = false' >> acl.config
995 $ do_push barney
995 $ do_push barney
996 Pushing as user barney
996 Pushing as user barney
997 hgrc = """
997 hgrc = """
998 [hooks]
998 [hooks]
999 pretxnchangegroup.acl = python:hgext.acl.hook
999 pretxnchangegroup.acl = python:hgext.acl.hook
1000 [acl]
1000 [acl]
1001 sources = push
1001 sources = push
1002 [acl.allow]
1002 [acl.allow]
1003 foo/** = fred
1003 foo/** = fred
1004 [acl.deny]
1004 [acl.deny]
1005 foo/bar/** = fred
1005 foo/bar/** = fred
1006 foo/Bar/** = fred
1006 foo/Bar/** = fred
1007 [acl.allow]
1007 [acl.allow]
1008 ** = barney
1008 ** = barney
1009 **/*.txt = wilma
1009 **/*.txt = wilma
1010 [acl]
1010 [acl]
1011 config = ../acl.config
1011 config = ../acl.config
1012 """
1012 """
1013 acl.config = """
1013 acl.config = """
1014 [acl.allow]
1014 [acl.allow]
1015 foo/** = betty
1015 foo/** = betty
1016 [hooks]
1016 [hooks]
1017 changegroup.acl = false
1017 changegroup.acl = false
1018 """
1018 """
1019 pushing to ../b
1019 pushing to ../b
1020 query 1; heads
1020 query 1; heads
1021 searching for changes
1021 searching for changes
1022 all remote heads known locally
1022 all remote heads known locally
1023 invalid branchheads cache (served): tip differs
1023 invalid branchheads cache (served): tip differs
1024 listing keys for "bookmarks"
1024 listing keys for "bookmarks"
1025 3 changesets found
1025 3 changesets found
1026 list of changesets:
1026 list of changesets:
1027 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1027 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1028 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1028 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1029 911600dab2ae7a9baff75958b84fe606851ce955
1029 911600dab2ae7a9baff75958b84fe606851ce955
1030 adding changesets
1030 adding changesets
1031 bundling: 1/3 changesets (33.33%)
1031 bundling: 1/3 changesets (33.33%)
1032 bundling: 2/3 changesets (66.67%)
1032 bundling: 2/3 changesets (66.67%)
1033 bundling: 3/3 changesets (100.00%)
1033 bundling: 3/3 changesets (100.00%)
1034 bundling: 1/3 manifests (33.33%)
1034 bundling: 1/3 manifests (33.33%)
1035 bundling: 2/3 manifests (66.67%)
1035 bundling: 2/3 manifests (66.67%)
1036 bundling: 3/3 manifests (100.00%)
1036 bundling: 3/3 manifests (100.00%)
1037 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1037 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1038 bundling: foo/file.txt 2/3 files (66.67%)
1038 bundling: foo/file.txt 2/3 files (66.67%)
1039 bundling: quux/file.py 3/3 files (100.00%)
1039 bundling: quux/file.py 3/3 files (100.00%)
1040 changesets: 1 chunks
1040 changesets: 1 chunks
1041 add changeset ef1ea85a6374
1041 add changeset ef1ea85a6374
1042 changesets: 2 chunks
1042 changesets: 2 chunks
1043 add changeset f9cafe1212c8
1043 add changeset f9cafe1212c8
1044 changesets: 3 chunks
1044 changesets: 3 chunks
1045 add changeset 911600dab2ae
1045 add changeset 911600dab2ae
1046 adding manifests
1046 adding manifests
1047 manifests: 1/3 chunks (33.33%)
1047 manifests: 1/3 chunks (33.33%)
1048 manifests: 2/3 chunks (66.67%)
1048 manifests: 2/3 chunks (66.67%)
1049 manifests: 3/3 chunks (100.00%)
1049 manifests: 3/3 chunks (100.00%)
1050 adding file changes
1050 adding file changes
1051 adding foo/Bar/file.txt revisions
1051 adding foo/Bar/file.txt revisions
1052 files: 1/3 chunks (33.33%)
1052 files: 1/3 chunks (33.33%)
1053 adding foo/file.txt revisions
1053 adding foo/file.txt revisions
1054 files: 2/3 chunks (66.67%)
1054 files: 2/3 chunks (66.67%)
1055 adding quux/file.py revisions
1055 adding quux/file.py revisions
1056 files: 3/3 chunks (100.00%)
1056 files: 3/3 chunks (100.00%)
1057 added 3 changesets with 3 changes to 3 files
1057 added 3 changesets with 3 changes to 3 files
1058 calling hook pretxnchangegroup.acl: hgext.acl.hook
1058 calling hook pretxnchangegroup.acl: hgext.acl.hook
1059 acl: checking access for user "barney"
1059 acl: checking access for user "barney"
1060 acl: acl.allow.branches not enabled
1060 acl: acl.allow.branches not enabled
1061 acl: acl.deny.branches not enabled
1061 acl: acl.deny.branches not enabled
1062 acl: acl.allow enabled, 1 entries for user barney
1062 acl: acl.allow enabled, 1 entries for user barney
1063 acl: acl.deny enabled, 0 entries for user barney
1063 acl: acl.deny enabled, 0 entries for user barney
1064 acl: branch access granted: "ef1ea85a6374" on branch "default"
1064 acl: branch access granted: "ef1ea85a6374" on branch "default"
1065 acl: path access granted: "ef1ea85a6374"
1065 acl: path access granted: "ef1ea85a6374"
1066 acl: branch access granted: "f9cafe1212c8" on branch "default"
1066 acl: branch access granted: "f9cafe1212c8" on branch "default"
1067 acl: path access granted: "f9cafe1212c8"
1067 acl: path access granted: "f9cafe1212c8"
1068 acl: branch access granted: "911600dab2ae" on branch "default"
1068 acl: branch access granted: "911600dab2ae" on branch "default"
1069 acl: path access granted: "911600dab2ae"
1069 acl: path access granted: "911600dab2ae"
1070 updating the branch cache
1070 listing keys for "phases"
1071 listing keys for "phases"
1071 try to push obsolete markers to remote
1072 try to push obsolete markers to remote
1072 updating the branch cache
1073 checking for updated bookmarks
1073 checking for updated bookmarks
1074 listing keys for "bookmarks"
1074 listing keys for "bookmarks"
1075 repository tip rolled back to revision 0 (undo push)
1075 repository tip rolled back to revision 0 (undo push)
1076 0:6675d58eff77
1076 0:6675d58eff77
1077
1077
1078
1078
1079 asterisk
1079 asterisk
1080
1080
1081 $ init_config
1081 $ init_config
1082
1082
1083 asterisk test
1083 asterisk test
1084
1084
1085 $ echo '[acl.allow]' >> $config
1085 $ echo '[acl.allow]' >> $config
1086 $ echo "** = fred" >> $config
1086 $ echo "** = fred" >> $config
1087
1087
1088 fred is always allowed
1088 fred is always allowed
1089
1089
1090 $ do_push fred
1090 $ do_push fred
1091 Pushing as user fred
1091 Pushing as user fred
1092 hgrc = """
1092 hgrc = """
1093 [acl]
1093 [acl]
1094 sources = push
1094 sources = push
1095 [extensions]
1095 [extensions]
1096 [acl.allow]
1096 [acl.allow]
1097 ** = fred
1097 ** = fred
1098 """
1098 """
1099 pushing to ../b
1099 pushing to ../b
1100 query 1; heads
1100 query 1; heads
1101 searching for changes
1101 searching for changes
1102 all remote heads known locally
1102 all remote heads known locally
1103 invalid branchheads cache (served): tip differs
1103 invalid branchheads cache (served): tip differs
1104 listing keys for "bookmarks"
1104 listing keys for "bookmarks"
1105 3 changesets found
1105 3 changesets found
1106 list of changesets:
1106 list of changesets:
1107 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1107 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1108 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1108 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1109 911600dab2ae7a9baff75958b84fe606851ce955
1109 911600dab2ae7a9baff75958b84fe606851ce955
1110 adding changesets
1110 adding changesets
1111 bundling: 1/3 changesets (33.33%)
1111 bundling: 1/3 changesets (33.33%)
1112 bundling: 2/3 changesets (66.67%)
1112 bundling: 2/3 changesets (66.67%)
1113 bundling: 3/3 changesets (100.00%)
1113 bundling: 3/3 changesets (100.00%)
1114 bundling: 1/3 manifests (33.33%)
1114 bundling: 1/3 manifests (33.33%)
1115 bundling: 2/3 manifests (66.67%)
1115 bundling: 2/3 manifests (66.67%)
1116 bundling: 3/3 manifests (100.00%)
1116 bundling: 3/3 manifests (100.00%)
1117 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1117 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1118 bundling: foo/file.txt 2/3 files (66.67%)
1118 bundling: foo/file.txt 2/3 files (66.67%)
1119 bundling: quux/file.py 3/3 files (100.00%)
1119 bundling: quux/file.py 3/3 files (100.00%)
1120 changesets: 1 chunks
1120 changesets: 1 chunks
1121 add changeset ef1ea85a6374
1121 add changeset ef1ea85a6374
1122 changesets: 2 chunks
1122 changesets: 2 chunks
1123 add changeset f9cafe1212c8
1123 add changeset f9cafe1212c8
1124 changesets: 3 chunks
1124 changesets: 3 chunks
1125 add changeset 911600dab2ae
1125 add changeset 911600dab2ae
1126 adding manifests
1126 adding manifests
1127 manifests: 1/3 chunks (33.33%)
1127 manifests: 1/3 chunks (33.33%)
1128 manifests: 2/3 chunks (66.67%)
1128 manifests: 2/3 chunks (66.67%)
1129 manifests: 3/3 chunks (100.00%)
1129 manifests: 3/3 chunks (100.00%)
1130 adding file changes
1130 adding file changes
1131 adding foo/Bar/file.txt revisions
1131 adding foo/Bar/file.txt revisions
1132 files: 1/3 chunks (33.33%)
1132 files: 1/3 chunks (33.33%)
1133 adding foo/file.txt revisions
1133 adding foo/file.txt revisions
1134 files: 2/3 chunks (66.67%)
1134 files: 2/3 chunks (66.67%)
1135 adding quux/file.py revisions
1135 adding quux/file.py revisions
1136 files: 3/3 chunks (100.00%)
1136 files: 3/3 chunks (100.00%)
1137 added 3 changesets with 3 changes to 3 files
1137 added 3 changesets with 3 changes to 3 files
1138 calling hook pretxnchangegroup.acl: hgext.acl.hook
1138 calling hook pretxnchangegroup.acl: hgext.acl.hook
1139 acl: checking access for user "fred"
1139 acl: checking access for user "fred"
1140 acl: acl.allow.branches not enabled
1140 acl: acl.allow.branches not enabled
1141 acl: acl.deny.branches not enabled
1141 acl: acl.deny.branches not enabled
1142 acl: acl.allow enabled, 1 entries for user fred
1142 acl: acl.allow enabled, 1 entries for user fred
1143 acl: acl.deny not enabled
1143 acl: acl.deny not enabled
1144 acl: branch access granted: "ef1ea85a6374" on branch "default"
1144 acl: branch access granted: "ef1ea85a6374" on branch "default"
1145 acl: path access granted: "ef1ea85a6374"
1145 acl: path access granted: "ef1ea85a6374"
1146 acl: branch access granted: "f9cafe1212c8" on branch "default"
1146 acl: branch access granted: "f9cafe1212c8" on branch "default"
1147 acl: path access granted: "f9cafe1212c8"
1147 acl: path access granted: "f9cafe1212c8"
1148 acl: branch access granted: "911600dab2ae" on branch "default"
1148 acl: branch access granted: "911600dab2ae" on branch "default"
1149 acl: path access granted: "911600dab2ae"
1149 acl: path access granted: "911600dab2ae"
1150 updating the branch cache
1150 listing keys for "phases"
1151 listing keys for "phases"
1151 try to push obsolete markers to remote
1152 try to push obsolete markers to remote
1152 updating the branch cache
1153 checking for updated bookmarks
1153 checking for updated bookmarks
1154 listing keys for "bookmarks"
1154 listing keys for "bookmarks"
1155 repository tip rolled back to revision 0 (undo push)
1155 repository tip rolled back to revision 0 (undo push)
1156 0:6675d58eff77
1156 0:6675d58eff77
1157
1157
1158
1158
1159 $ echo '[acl.deny]' >> $config
1159 $ echo '[acl.deny]' >> $config
1160 $ echo "foo/Bar/** = *" >> $config
1160 $ echo "foo/Bar/** = *" >> $config
1161
1161
1162 no one is allowed inside foo/Bar/
1162 no one is allowed inside foo/Bar/
1163
1163
1164 $ do_push fred
1164 $ do_push fred
1165 Pushing as user fred
1165 Pushing as user fred
1166 hgrc = """
1166 hgrc = """
1167 [acl]
1167 [acl]
1168 sources = push
1168 sources = push
1169 [extensions]
1169 [extensions]
1170 [acl.allow]
1170 [acl.allow]
1171 ** = fred
1171 ** = fred
1172 [acl.deny]
1172 [acl.deny]
1173 foo/Bar/** = *
1173 foo/Bar/** = *
1174 """
1174 """
1175 pushing to ../b
1175 pushing to ../b
1176 query 1; heads
1176 query 1; heads
1177 searching for changes
1177 searching for changes
1178 all remote heads known locally
1178 all remote heads known locally
1179 invalid branchheads cache (served): tip differs
1179 invalid branchheads cache (served): tip differs
1180 listing keys for "bookmarks"
1180 listing keys for "bookmarks"
1181 3 changesets found
1181 3 changesets found
1182 list of changesets:
1182 list of changesets:
1183 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1183 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1184 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1184 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1185 911600dab2ae7a9baff75958b84fe606851ce955
1185 911600dab2ae7a9baff75958b84fe606851ce955
1186 adding changesets
1186 adding changesets
1187 bundling: 1/3 changesets (33.33%)
1187 bundling: 1/3 changesets (33.33%)
1188 bundling: 2/3 changesets (66.67%)
1188 bundling: 2/3 changesets (66.67%)
1189 bundling: 3/3 changesets (100.00%)
1189 bundling: 3/3 changesets (100.00%)
1190 bundling: 1/3 manifests (33.33%)
1190 bundling: 1/3 manifests (33.33%)
1191 bundling: 2/3 manifests (66.67%)
1191 bundling: 2/3 manifests (66.67%)
1192 bundling: 3/3 manifests (100.00%)
1192 bundling: 3/3 manifests (100.00%)
1193 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1193 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1194 bundling: foo/file.txt 2/3 files (66.67%)
1194 bundling: foo/file.txt 2/3 files (66.67%)
1195 bundling: quux/file.py 3/3 files (100.00%)
1195 bundling: quux/file.py 3/3 files (100.00%)
1196 changesets: 1 chunks
1196 changesets: 1 chunks
1197 add changeset ef1ea85a6374
1197 add changeset ef1ea85a6374
1198 changesets: 2 chunks
1198 changesets: 2 chunks
1199 add changeset f9cafe1212c8
1199 add changeset f9cafe1212c8
1200 changesets: 3 chunks
1200 changesets: 3 chunks
1201 add changeset 911600dab2ae
1201 add changeset 911600dab2ae
1202 adding manifests
1202 adding manifests
1203 manifests: 1/3 chunks (33.33%)
1203 manifests: 1/3 chunks (33.33%)
1204 manifests: 2/3 chunks (66.67%)
1204 manifests: 2/3 chunks (66.67%)
1205 manifests: 3/3 chunks (100.00%)
1205 manifests: 3/3 chunks (100.00%)
1206 adding file changes
1206 adding file changes
1207 adding foo/Bar/file.txt revisions
1207 adding foo/Bar/file.txt revisions
1208 files: 1/3 chunks (33.33%)
1208 files: 1/3 chunks (33.33%)
1209 adding foo/file.txt revisions
1209 adding foo/file.txt revisions
1210 files: 2/3 chunks (66.67%)
1210 files: 2/3 chunks (66.67%)
1211 adding quux/file.py revisions
1211 adding quux/file.py revisions
1212 files: 3/3 chunks (100.00%)
1212 files: 3/3 chunks (100.00%)
1213 added 3 changesets with 3 changes to 3 files
1213 added 3 changesets with 3 changes to 3 files
1214 calling hook pretxnchangegroup.acl: hgext.acl.hook
1214 calling hook pretxnchangegroup.acl: hgext.acl.hook
1215 acl: checking access for user "fred"
1215 acl: checking access for user "fred"
1216 acl: acl.allow.branches not enabled
1216 acl: acl.allow.branches not enabled
1217 acl: acl.deny.branches not enabled
1217 acl: acl.deny.branches not enabled
1218 acl: acl.allow enabled, 1 entries for user fred
1218 acl: acl.allow enabled, 1 entries for user fred
1219 acl: acl.deny enabled, 1 entries for user fred
1219 acl: acl.deny enabled, 1 entries for user fred
1220 acl: branch access granted: "ef1ea85a6374" on branch "default"
1220 acl: branch access granted: "ef1ea85a6374" on branch "default"
1221 acl: path access granted: "ef1ea85a6374"
1221 acl: path access granted: "ef1ea85a6374"
1222 acl: branch access granted: "f9cafe1212c8" on branch "default"
1222 acl: branch access granted: "f9cafe1212c8" on branch "default"
1223 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1223 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1224 transaction abort!
1224 transaction abort!
1225 rollback completed
1225 rollback completed
1226 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1226 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1227 no rollback information available
1227 no rollback information available
1228 0:6675d58eff77
1228 0:6675d58eff77
1229
1229
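The two pushes above illustrate the evaluation order used by the acl hook: deny lists are consulted before allow lists, and path patterns are rooted globs, so "foo/Bar/**" matches every file below foo/Bar/ while "**" matches every file in the repository. A minimal sketch of the combined configuration exercised here (illustrative only; the comments are ours, the section names and hook target are the ones shown in the transcript):

  [hooks]
  pretxnchangegroup.acl = python:hgext.acl.hook
  [acl]
  # only check changesets arriving via push
  sources = push
  [acl.allow]
  # "**" matches any path, so fred may normally touch every file
  ** = fred
  [acl.deny]
  # deny entries are checked before allow entries, so this overrides the rule above
  foo/Bar/** = *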
1230
1230
1231 Groups
1231 Groups
1232
1232
1233 $ init_config
1233 $ init_config
1234
1234
1235 OS-level groups
1235 OS-level groups
1236
1236
1237 $ echo '[acl.allow]' >> $config
1237 $ echo '[acl.allow]' >> $config
1238 $ echo "** = @group1" >> $config
1238 $ echo "** = @group1" >> $config
1239
1239
1240 @group1 is always allowed
1240 @group1 is always allowed
1241
1241
1242 $ do_push fred
1242 $ do_push fred
1243 Pushing as user fred
1243 Pushing as user fred
1244 hgrc = """
1244 hgrc = """
1245 [acl]
1245 [acl]
1246 sources = push
1246 sources = push
1247 [extensions]
1247 [extensions]
1248 [acl.allow]
1248 [acl.allow]
1249 ** = @group1
1249 ** = @group1
1250 """
1250 """
1251 pushing to ../b
1251 pushing to ../b
1252 query 1; heads
1252 query 1; heads
1253 searching for changes
1253 searching for changes
1254 all remote heads known locally
1254 all remote heads known locally
1255 invalid branchheads cache (served): tip differs
1255 invalid branchheads cache (served): tip differs
1256 listing keys for "bookmarks"
1256 listing keys for "bookmarks"
1257 3 changesets found
1257 3 changesets found
1258 list of changesets:
1258 list of changesets:
1259 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1259 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1260 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1260 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1261 911600dab2ae7a9baff75958b84fe606851ce955
1261 911600dab2ae7a9baff75958b84fe606851ce955
1262 adding changesets
1262 adding changesets
1263 bundling: 1/3 changesets (33.33%)
1263 bundling: 1/3 changesets (33.33%)
1264 bundling: 2/3 changesets (66.67%)
1264 bundling: 2/3 changesets (66.67%)
1265 bundling: 3/3 changesets (100.00%)
1265 bundling: 3/3 changesets (100.00%)
1266 bundling: 1/3 manifests (33.33%)
1266 bundling: 1/3 manifests (33.33%)
1267 bundling: 2/3 manifests (66.67%)
1267 bundling: 2/3 manifests (66.67%)
1268 bundling: 3/3 manifests (100.00%)
1268 bundling: 3/3 manifests (100.00%)
1269 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1269 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1270 bundling: foo/file.txt 2/3 files (66.67%)
1270 bundling: foo/file.txt 2/3 files (66.67%)
1271 bundling: quux/file.py 3/3 files (100.00%)
1271 bundling: quux/file.py 3/3 files (100.00%)
1272 changesets: 1 chunks
1272 changesets: 1 chunks
1273 add changeset ef1ea85a6374
1273 add changeset ef1ea85a6374
1274 changesets: 2 chunks
1274 changesets: 2 chunks
1275 add changeset f9cafe1212c8
1275 add changeset f9cafe1212c8
1276 changesets: 3 chunks
1276 changesets: 3 chunks
1277 add changeset 911600dab2ae
1277 add changeset 911600dab2ae
1278 adding manifests
1278 adding manifests
1279 manifests: 1/3 chunks (33.33%)
1279 manifests: 1/3 chunks (33.33%)
1280 manifests: 2/3 chunks (66.67%)
1280 manifests: 2/3 chunks (66.67%)
1281 manifests: 3/3 chunks (100.00%)
1281 manifests: 3/3 chunks (100.00%)
1282 adding file changes
1282 adding file changes
1283 adding foo/Bar/file.txt revisions
1283 adding foo/Bar/file.txt revisions
1284 files: 1/3 chunks (33.33%)
1284 files: 1/3 chunks (33.33%)
1285 adding foo/file.txt revisions
1285 adding foo/file.txt revisions
1286 files: 2/3 chunks (66.67%)
1286 files: 2/3 chunks (66.67%)
1287 adding quux/file.py revisions
1287 adding quux/file.py revisions
1288 files: 3/3 chunks (100.00%)
1288 files: 3/3 chunks (100.00%)
1289 added 3 changesets with 3 changes to 3 files
1289 added 3 changesets with 3 changes to 3 files
1290 calling hook pretxnchangegroup.acl: hgext.acl.hook
1290 calling hook pretxnchangegroup.acl: hgext.acl.hook
1291 acl: checking access for user "fred"
1291 acl: checking access for user "fred"
1292 acl: acl.allow.branches not enabled
1292 acl: acl.allow.branches not enabled
1293 acl: acl.deny.branches not enabled
1293 acl: acl.deny.branches not enabled
1294 acl: "group1" not defined in [acl.groups]
1294 acl: "group1" not defined in [acl.groups]
1295 acl: acl.allow enabled, 1 entries for user fred
1295 acl: acl.allow enabled, 1 entries for user fred
1296 acl: acl.deny not enabled
1296 acl: acl.deny not enabled
1297 acl: branch access granted: "ef1ea85a6374" on branch "default"
1297 acl: branch access granted: "ef1ea85a6374" on branch "default"
1298 acl: path access granted: "ef1ea85a6374"
1298 acl: path access granted: "ef1ea85a6374"
1299 acl: branch access granted: "f9cafe1212c8" on branch "default"
1299 acl: branch access granted: "f9cafe1212c8" on branch "default"
1300 acl: path access granted: "f9cafe1212c8"
1300 acl: path access granted: "f9cafe1212c8"
1301 acl: branch access granted: "911600dab2ae" on branch "default"
1301 acl: branch access granted: "911600dab2ae" on branch "default"
1302 acl: path access granted: "911600dab2ae"
1302 acl: path access granted: "911600dab2ae"
1303 updating the branch cache
1303 listing keys for "phases"
1304 listing keys for "phases"
1304 try to push obsolete markers to remote
1305 try to push obsolete markers to remote
1305 updating the branch cache
1306 checking for updated bookmarks
1306 checking for updated bookmarks
1307 listing keys for "bookmarks"
1307 listing keys for "bookmarks"
1308 repository tip rolled back to revision 0 (undo push)
1308 repository tip rolled back to revision 0 (undo push)
1309 0:6675d58eff77
1309 0:6675d58eff77
1310
1310
1311
1311
1312 $ echo '[acl.deny]' >> $config
1312 $ echo '[acl.deny]' >> $config
1313 $ echo "foo/Bar/** = @group1" >> $config
1313 $ echo "foo/Bar/** = @group1" >> $config
1314
1314
1315 @group1 is allowed anywhere except inside foo/Bar/
1315 @group1 is allowed anywhere except inside foo/Bar/
1316
1316
1317 $ do_push fred
1317 $ do_push fred
1318 Pushing as user fred
1318 Pushing as user fred
1319 hgrc = """
1319 hgrc = """
1320 [acl]
1320 [acl]
1321 sources = push
1321 sources = push
1322 [extensions]
1322 [extensions]
1323 [acl.allow]
1323 [acl.allow]
1324 ** = @group1
1324 ** = @group1
1325 [acl.deny]
1325 [acl.deny]
1326 foo/Bar/** = @group1
1326 foo/Bar/** = @group1
1327 """
1327 """
1328 pushing to ../b
1328 pushing to ../b
1329 query 1; heads
1329 query 1; heads
1330 searching for changes
1330 searching for changes
1331 all remote heads known locally
1331 all remote heads known locally
1332 invalid branchheads cache (served): tip differs
1332 invalid branchheads cache (served): tip differs
1333 listing keys for "bookmarks"
1333 listing keys for "bookmarks"
1334 3 changesets found
1334 3 changesets found
1335 list of changesets:
1335 list of changesets:
1336 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1336 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1337 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1337 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1338 911600dab2ae7a9baff75958b84fe606851ce955
1338 911600dab2ae7a9baff75958b84fe606851ce955
1339 adding changesets
1339 adding changesets
1340 bundling: 1/3 changesets (33.33%)
1340 bundling: 1/3 changesets (33.33%)
1341 bundling: 2/3 changesets (66.67%)
1341 bundling: 2/3 changesets (66.67%)
1342 bundling: 3/3 changesets (100.00%)
1342 bundling: 3/3 changesets (100.00%)
1343 bundling: 1/3 manifests (33.33%)
1343 bundling: 1/3 manifests (33.33%)
1344 bundling: 2/3 manifests (66.67%)
1344 bundling: 2/3 manifests (66.67%)
1345 bundling: 3/3 manifests (100.00%)
1345 bundling: 3/3 manifests (100.00%)
1346 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1346 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1347 bundling: foo/file.txt 2/3 files (66.67%)
1347 bundling: foo/file.txt 2/3 files (66.67%)
1348 bundling: quux/file.py 3/3 files (100.00%)
1348 bundling: quux/file.py 3/3 files (100.00%)
1349 changesets: 1 chunks
1349 changesets: 1 chunks
1350 add changeset ef1ea85a6374
1350 add changeset ef1ea85a6374
1351 changesets: 2 chunks
1351 changesets: 2 chunks
1352 add changeset f9cafe1212c8
1352 add changeset f9cafe1212c8
1353 changesets: 3 chunks
1353 changesets: 3 chunks
1354 add changeset 911600dab2ae
1354 add changeset 911600dab2ae
1355 adding manifests
1355 adding manifests
1356 manifests: 1/3 chunks (33.33%)
1356 manifests: 1/3 chunks (33.33%)
1357 manifests: 2/3 chunks (66.67%)
1357 manifests: 2/3 chunks (66.67%)
1358 manifests: 3/3 chunks (100.00%)
1358 manifests: 3/3 chunks (100.00%)
1359 adding file changes
1359 adding file changes
1360 adding foo/Bar/file.txt revisions
1360 adding foo/Bar/file.txt revisions
1361 files: 1/3 chunks (33.33%)
1361 files: 1/3 chunks (33.33%)
1362 adding foo/file.txt revisions
1362 adding foo/file.txt revisions
1363 files: 2/3 chunks (66.67%)
1363 files: 2/3 chunks (66.67%)
1364 adding quux/file.py revisions
1364 adding quux/file.py revisions
1365 files: 3/3 chunks (100.00%)
1365 files: 3/3 chunks (100.00%)
1366 added 3 changesets with 3 changes to 3 files
1366 added 3 changesets with 3 changes to 3 files
1367 calling hook pretxnchangegroup.acl: hgext.acl.hook
1367 calling hook pretxnchangegroup.acl: hgext.acl.hook
1368 acl: checking access for user "fred"
1368 acl: checking access for user "fred"
1369 acl: acl.allow.branches not enabled
1369 acl: acl.allow.branches not enabled
1370 acl: acl.deny.branches not enabled
1370 acl: acl.deny.branches not enabled
1371 acl: "group1" not defined in [acl.groups]
1371 acl: "group1" not defined in [acl.groups]
1372 acl: acl.allow enabled, 1 entries for user fred
1372 acl: acl.allow enabled, 1 entries for user fred
1373 acl: "group1" not defined in [acl.groups]
1373 acl: "group1" not defined in [acl.groups]
1374 acl: acl.deny enabled, 1 entries for user fred
1374 acl: acl.deny enabled, 1 entries for user fred
1375 acl: branch access granted: "ef1ea85a6374" on branch "default"
1375 acl: branch access granted: "ef1ea85a6374" on branch "default"
1376 acl: path access granted: "ef1ea85a6374"
1376 acl: path access granted: "ef1ea85a6374"
1377 acl: branch access granted: "f9cafe1212c8" on branch "default"
1377 acl: branch access granted: "f9cafe1212c8" on branch "default"
1378 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1378 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1379 transaction abort!
1379 transaction abort!
1380 rollback completed
1380 rollback completed
1381 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1381 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1382 no rollback information available
1382 no rollback information available
1383 0:6675d58eff77
1383 0:6675d58eff77
1384
1384
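An entry that starts with "@" names a group rather than a single user. When the group is listed in an [acl.groups] section, that membership list is used; otherwise, as the '"group1" not defined in [acl.groups]' lines above show, membership is looked up from the OS-level group database (the test normally fakes that lookup; the "Invalid group" test below disables the trick to exercise the real failure path). A hypothetical group definition would look like:

  [acl.groups]
  # purely illustrative membership list; without it, the OS group is consulted
  group1 = fred, barney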
1385
1385
1386 Invalid group
1386 Invalid group
1387
1387
1388 Disable the fakegroups trick to get real failures
1388 Disable the fakegroups trick to get real failures
1389
1389
1390 $ grep -v fakegroups $config > config.tmp
1390 $ grep -v fakegroups $config > config.tmp
1391 $ mv config.tmp $config
1391 $ mv config.tmp $config
1392 $ echo '[acl.allow]' >> $config
1392 $ echo '[acl.allow]' >> $config
1393 $ echo "** = @unlikelytoexist" >> $config
1393 $ echo "** = @unlikelytoexist" >> $config
1394 $ do_push fred 2>&1 | grep unlikelytoexist
1394 $ do_push fred 2>&1 | grep unlikelytoexist
1395 ** = @unlikelytoexist
1395 ** = @unlikelytoexist
1396 acl: "unlikelytoexist" not defined in [acl.groups]
1396 acl: "unlikelytoexist" not defined in [acl.groups]
1397 error: pretxnchangegroup.acl hook failed: group 'unlikelytoexist' is undefined
1397 error: pretxnchangegroup.acl hook failed: group 'unlikelytoexist' is undefined
1398 abort: group 'unlikelytoexist' is undefined
1398 abort: group 'unlikelytoexist' is undefined
1399
1399
1400
1400
1401 Branch acl tests setup
1401 Branch acl tests setup
1402
1402
1403 $ init_config
1403 $ init_config
1404 $ cd b
1404 $ cd b
1405 $ hg up
1405 $ hg up
1406 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1406 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1407 $ hg branch foobar
1407 $ hg branch foobar
1408 marked working directory as branch foobar
1408 marked working directory as branch foobar
1409 (branches are permanent and global, did you want a bookmark?)
1409 (branches are permanent and global, did you want a bookmark?)
1410 $ hg commit -m 'create foobar'
1410 $ hg commit -m 'create foobar'
1411 $ echo 'foo contents' > abc.txt
1411 $ echo 'foo contents' > abc.txt
1412 $ hg add abc.txt
1412 $ hg add abc.txt
1413 $ hg commit -m 'foobar contents'
1413 $ hg commit -m 'foobar contents'
1414 $ cd ..
1414 $ cd ..
1415 $ hg --cwd a pull ../b
1415 $ hg --cwd a pull ../b
1416 pulling from ../b
1416 pulling from ../b
1417 searching for changes
1417 searching for changes
1418 adding changesets
1418 adding changesets
1419 adding manifests
1419 adding manifests
1420 adding file changes
1420 adding file changes
1421 added 2 changesets with 1 changes to 1 files (+1 heads)
1421 added 2 changesets with 1 changes to 1 files (+1 heads)
1422 (run 'hg heads' to see heads)
1422 (run 'hg heads' to see heads)
1423
1423
1424 Create additional changeset on foobar branch
1424 Create additional changeset on foobar branch
1425
1425
1426 $ cd a
1426 $ cd a
1427 $ hg up -C foobar
1427 $ hg up -C foobar
1428 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
1428 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
1429 $ echo 'foo contents2' > abc.txt
1429 $ echo 'foo contents2' > abc.txt
1430 $ hg commit -m 'foobar contents2'
1430 $ hg commit -m 'foobar contents2'
1431 $ cd ..
1431 $ cd ..
1432
1432
1433
1433
1434 No branch acls specified
1434 No branch acls specified
1435
1435
1436 $ do_push astro
1436 $ do_push astro
1437 Pushing as user astro
1437 Pushing as user astro
1438 hgrc = """
1438 hgrc = """
1439 [acl]
1439 [acl]
1440 sources = push
1440 sources = push
1441 [extensions]
1441 [extensions]
1442 """
1442 """
1443 pushing to ../b
1443 pushing to ../b
1444 query 1; heads
1444 query 1; heads
1445 searching for changes
1445 searching for changes
1446 all remote heads known locally
1446 all remote heads known locally
1447 listing keys for "bookmarks"
1447 listing keys for "bookmarks"
1448 4 changesets found
1448 4 changesets found
1449 list of changesets:
1449 list of changesets:
1450 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1450 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1451 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1451 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1452 911600dab2ae7a9baff75958b84fe606851ce955
1452 911600dab2ae7a9baff75958b84fe606851ce955
1453 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1453 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1454 adding changesets
1454 adding changesets
1455 bundling: 1/4 changesets (25.00%)
1455 bundling: 1/4 changesets (25.00%)
1456 bundling: 2/4 changesets (50.00%)
1456 bundling: 2/4 changesets (50.00%)
1457 bundling: 3/4 changesets (75.00%)
1457 bundling: 3/4 changesets (75.00%)
1458 bundling: 4/4 changesets (100.00%)
1458 bundling: 4/4 changesets (100.00%)
1459 bundling: 1/4 manifests (25.00%)
1459 bundling: 1/4 manifests (25.00%)
1460 bundling: 2/4 manifests (50.00%)
1460 bundling: 2/4 manifests (50.00%)
1461 bundling: 3/4 manifests (75.00%)
1461 bundling: 3/4 manifests (75.00%)
1462 bundling: 4/4 manifests (100.00%)
1462 bundling: 4/4 manifests (100.00%)
1463 bundling: abc.txt 1/4 files (25.00%)
1463 bundling: abc.txt 1/4 files (25.00%)
1464 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1464 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1465 bundling: foo/file.txt 3/4 files (75.00%)
1465 bundling: foo/file.txt 3/4 files (75.00%)
1466 bundling: quux/file.py 4/4 files (100.00%)
1466 bundling: quux/file.py 4/4 files (100.00%)
1467 changesets: 1 chunks
1467 changesets: 1 chunks
1468 add changeset ef1ea85a6374
1468 add changeset ef1ea85a6374
1469 changesets: 2 chunks
1469 changesets: 2 chunks
1470 add changeset f9cafe1212c8
1470 add changeset f9cafe1212c8
1471 changesets: 3 chunks
1471 changesets: 3 chunks
1472 add changeset 911600dab2ae
1472 add changeset 911600dab2ae
1473 changesets: 4 chunks
1473 changesets: 4 chunks
1474 add changeset e8fc755d4d82
1474 add changeset e8fc755d4d82
1475 adding manifests
1475 adding manifests
1476 manifests: 1/4 chunks (25.00%)
1476 manifests: 1/4 chunks (25.00%)
1477 manifests: 2/4 chunks (50.00%)
1477 manifests: 2/4 chunks (50.00%)
1478 manifests: 3/4 chunks (75.00%)
1478 manifests: 3/4 chunks (75.00%)
1479 manifests: 4/4 chunks (100.00%)
1479 manifests: 4/4 chunks (100.00%)
1480 adding file changes
1480 adding file changes
1481 adding abc.txt revisions
1481 adding abc.txt revisions
1482 files: 1/4 chunks (25.00%)
1482 files: 1/4 chunks (25.00%)
1483 adding foo/Bar/file.txt revisions
1483 adding foo/Bar/file.txt revisions
1484 files: 2/4 chunks (50.00%)
1484 files: 2/4 chunks (50.00%)
1485 adding foo/file.txt revisions
1485 adding foo/file.txt revisions
1486 files: 3/4 chunks (75.00%)
1486 files: 3/4 chunks (75.00%)
1487 adding quux/file.py revisions
1487 adding quux/file.py revisions
1488 files: 4/4 chunks (100.00%)
1488 files: 4/4 chunks (100.00%)
1489 added 4 changesets with 4 changes to 4 files (+1 heads)
1489 added 4 changesets with 4 changes to 4 files (+1 heads)
1490 calling hook pretxnchangegroup.acl: hgext.acl.hook
1490 calling hook pretxnchangegroup.acl: hgext.acl.hook
1491 acl: checking access for user "astro"
1491 acl: checking access for user "astro"
1492 acl: acl.allow.branches not enabled
1492 acl: acl.allow.branches not enabled
1493 acl: acl.deny.branches not enabled
1493 acl: acl.deny.branches not enabled
1494 acl: acl.allow not enabled
1494 acl: acl.allow not enabled
1495 acl: acl.deny not enabled
1495 acl: acl.deny not enabled
1496 acl: branch access granted: "ef1ea85a6374" on branch "default"
1496 acl: branch access granted: "ef1ea85a6374" on branch "default"
1497 acl: path access granted: "ef1ea85a6374"
1497 acl: path access granted: "ef1ea85a6374"
1498 acl: branch access granted: "f9cafe1212c8" on branch "default"
1498 acl: branch access granted: "f9cafe1212c8" on branch "default"
1499 acl: path access granted: "f9cafe1212c8"
1499 acl: path access granted: "f9cafe1212c8"
1500 acl: branch access granted: "911600dab2ae" on branch "default"
1500 acl: branch access granted: "911600dab2ae" on branch "default"
1501 acl: path access granted: "911600dab2ae"
1501 acl: path access granted: "911600dab2ae"
1502 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1502 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1503 acl: path access granted: "e8fc755d4d82"
1503 acl: path access granted: "e8fc755d4d82"
1504 updating the branch cache
1504 listing keys for "phases"
1505 listing keys for "phases"
1505 try to push obsolete markers to remote
1506 try to push obsolete markers to remote
1506 updating the branch cache
1507 checking for updated bookmarks
1507 checking for updated bookmarks
1508 listing keys for "bookmarks"
1508 listing keys for "bookmarks"
1509 repository tip rolled back to revision 2 (undo push)
1509 repository tip rolled back to revision 2 (undo push)
1510 2:fb35475503ef
1510 2:fb35475503ef
1511
1511
1512
1512
1513 Branch acl deny test
1513 Branch acl deny test
1514
1514
1515 $ echo "[acl.deny.branches]" >> $config
1515 $ echo "[acl.deny.branches]" >> $config
1516 $ echo "foobar = *" >> $config
1516 $ echo "foobar = *" >> $config
1517 $ do_push astro
1517 $ do_push astro
1518 Pushing as user astro
1518 Pushing as user astro
1519 hgrc = """
1519 hgrc = """
1520 [acl]
1520 [acl]
1521 sources = push
1521 sources = push
1522 [extensions]
1522 [extensions]
1523 [acl.deny.branches]
1523 [acl.deny.branches]
1524 foobar = *
1524 foobar = *
1525 """
1525 """
1526 pushing to ../b
1526 pushing to ../b
1527 query 1; heads
1527 query 1; heads
1528 searching for changes
1528 searching for changes
1529 all remote heads known locally
1529 all remote heads known locally
1530 listing keys for "bookmarks"
1530 listing keys for "bookmarks"
1531 4 changesets found
1531 4 changesets found
1532 list of changesets:
1532 list of changesets:
1533 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1533 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1534 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1534 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1535 911600dab2ae7a9baff75958b84fe606851ce955
1535 911600dab2ae7a9baff75958b84fe606851ce955
1536 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1536 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1537 adding changesets
1537 adding changesets
1538 bundling: 1/4 changesets (25.00%)
1538 bundling: 1/4 changesets (25.00%)
1539 bundling: 2/4 changesets (50.00%)
1539 bundling: 2/4 changesets (50.00%)
1540 bundling: 3/4 changesets (75.00%)
1540 bundling: 3/4 changesets (75.00%)
1541 bundling: 4/4 changesets (100.00%)
1541 bundling: 4/4 changesets (100.00%)
1542 bundling: 1/4 manifests (25.00%)
1542 bundling: 1/4 manifests (25.00%)
1543 bundling: 2/4 manifests (50.00%)
1543 bundling: 2/4 manifests (50.00%)
1544 bundling: 3/4 manifests (75.00%)
1544 bundling: 3/4 manifests (75.00%)
1545 bundling: 4/4 manifests (100.00%)
1545 bundling: 4/4 manifests (100.00%)
1546 bundling: abc.txt 1/4 files (25.00%)
1546 bundling: abc.txt 1/4 files (25.00%)
1547 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1547 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1548 bundling: foo/file.txt 3/4 files (75.00%)
1548 bundling: foo/file.txt 3/4 files (75.00%)
1549 bundling: quux/file.py 4/4 files (100.00%)
1549 bundling: quux/file.py 4/4 files (100.00%)
1550 changesets: 1 chunks
1550 changesets: 1 chunks
1551 add changeset ef1ea85a6374
1551 add changeset ef1ea85a6374
1552 changesets: 2 chunks
1552 changesets: 2 chunks
1553 add changeset f9cafe1212c8
1553 add changeset f9cafe1212c8
1554 changesets: 3 chunks
1554 changesets: 3 chunks
1555 add changeset 911600dab2ae
1555 add changeset 911600dab2ae
1556 changesets: 4 chunks
1556 changesets: 4 chunks
1557 add changeset e8fc755d4d82
1557 add changeset e8fc755d4d82
1558 adding manifests
1558 adding manifests
1559 manifests: 1/4 chunks (25.00%)
1559 manifests: 1/4 chunks (25.00%)
1560 manifests: 2/4 chunks (50.00%)
1560 manifests: 2/4 chunks (50.00%)
1561 manifests: 3/4 chunks (75.00%)
1561 manifests: 3/4 chunks (75.00%)
1562 manifests: 4/4 chunks (100.00%)
1562 manifests: 4/4 chunks (100.00%)
1563 adding file changes
1563 adding file changes
1564 adding abc.txt revisions
1564 adding abc.txt revisions
1565 files: 1/4 chunks (25.00%)
1565 files: 1/4 chunks (25.00%)
1566 adding foo/Bar/file.txt revisions
1566 adding foo/Bar/file.txt revisions
1567 files: 2/4 chunks (50.00%)
1567 files: 2/4 chunks (50.00%)
1568 adding foo/file.txt revisions
1568 adding foo/file.txt revisions
1569 files: 3/4 chunks (75.00%)
1569 files: 3/4 chunks (75.00%)
1570 adding quux/file.py revisions
1570 adding quux/file.py revisions
1571 files: 4/4 chunks (100.00%)
1571 files: 4/4 chunks (100.00%)
1572 added 4 changesets with 4 changes to 4 files (+1 heads)
1572 added 4 changesets with 4 changes to 4 files (+1 heads)
1573 calling hook pretxnchangegroup.acl: hgext.acl.hook
1573 calling hook pretxnchangegroup.acl: hgext.acl.hook
1574 acl: checking access for user "astro"
1574 acl: checking access for user "astro"
1575 acl: acl.allow.branches not enabled
1575 acl: acl.allow.branches not enabled
1576 acl: acl.deny.branches enabled, 1 entries for user astro
1576 acl: acl.deny.branches enabled, 1 entries for user astro
1577 acl: acl.allow not enabled
1577 acl: acl.allow not enabled
1578 acl: acl.deny not enabled
1578 acl: acl.deny not enabled
1579 acl: branch access granted: "ef1ea85a6374" on branch "default"
1579 acl: branch access granted: "ef1ea85a6374" on branch "default"
1580 acl: path access granted: "ef1ea85a6374"
1580 acl: path access granted: "ef1ea85a6374"
1581 acl: branch access granted: "f9cafe1212c8" on branch "default"
1581 acl: branch access granted: "f9cafe1212c8" on branch "default"
1582 acl: path access granted: "f9cafe1212c8"
1582 acl: path access granted: "f9cafe1212c8"
1583 acl: branch access granted: "911600dab2ae" on branch "default"
1583 acl: branch access granted: "911600dab2ae" on branch "default"
1584 acl: path access granted: "911600dab2ae"
1584 acl: path access granted: "911600dab2ae"
1585 error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1585 error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1586 transaction abort!
1586 transaction abort!
1587 rollback completed
1587 rollback completed
1588 abort: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1588 abort: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1589 no rollback information available
1589 no rollback information available
1590 2:fb35475503ef
1590 2:fb35475503ef
1591
1591
1592
1592
1593 Branch acl empty allow test
1593 Branch acl empty allow test
1594
1594
1595 $ init_config
1595 $ init_config
1596 $ echo "[acl.allow.branches]" >> $config
1596 $ echo "[acl.allow.branches]" >> $config
1597 $ do_push astro
1597 $ do_push astro
1598 Pushing as user astro
1598 Pushing as user astro
1599 hgrc = """
1599 hgrc = """
1600 [acl]
1600 [acl]
1601 sources = push
1601 sources = push
1602 [extensions]
1602 [extensions]
1603 [acl.allow.branches]
1603 [acl.allow.branches]
1604 """
1604 """
1605 pushing to ../b
1605 pushing to ../b
1606 query 1; heads
1606 query 1; heads
1607 searching for changes
1607 searching for changes
1608 all remote heads known locally
1608 all remote heads known locally
1609 listing keys for "bookmarks"
1609 listing keys for "bookmarks"
1610 4 changesets found
1610 4 changesets found
1611 list of changesets:
1611 list of changesets:
1612 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1612 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1613 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1613 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1614 911600dab2ae7a9baff75958b84fe606851ce955
1614 911600dab2ae7a9baff75958b84fe606851ce955
1615 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1615 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1616 adding changesets
1616 adding changesets
1617 bundling: 1/4 changesets (25.00%)
1617 bundling: 1/4 changesets (25.00%)
1618 bundling: 2/4 changesets (50.00%)
1618 bundling: 2/4 changesets (50.00%)
1619 bundling: 3/4 changesets (75.00%)
1619 bundling: 3/4 changesets (75.00%)
1620 bundling: 4/4 changesets (100.00%)
1620 bundling: 4/4 changesets (100.00%)
1621 bundling: 1/4 manifests (25.00%)
1621 bundling: 1/4 manifests (25.00%)
1622 bundling: 2/4 manifests (50.00%)
1622 bundling: 2/4 manifests (50.00%)
1623 bundling: 3/4 manifests (75.00%)
1623 bundling: 3/4 manifests (75.00%)
1624 bundling: 4/4 manifests (100.00%)
1624 bundling: 4/4 manifests (100.00%)
1625 bundling: abc.txt 1/4 files (25.00%)
1625 bundling: abc.txt 1/4 files (25.00%)
1626 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1626 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1627 bundling: foo/file.txt 3/4 files (75.00%)
1627 bundling: foo/file.txt 3/4 files (75.00%)
1628 bundling: quux/file.py 4/4 files (100.00%)
1628 bundling: quux/file.py 4/4 files (100.00%)
1629 changesets: 1 chunks
1629 changesets: 1 chunks
1630 add changeset ef1ea85a6374
1630 add changeset ef1ea85a6374
1631 changesets: 2 chunks
1631 changesets: 2 chunks
1632 add changeset f9cafe1212c8
1632 add changeset f9cafe1212c8
1633 changesets: 3 chunks
1633 changesets: 3 chunks
1634 add changeset 911600dab2ae
1634 add changeset 911600dab2ae
1635 changesets: 4 chunks
1635 changesets: 4 chunks
1636 add changeset e8fc755d4d82
1636 add changeset e8fc755d4d82
1637 adding manifests
1637 adding manifests
1638 manifests: 1/4 chunks (25.00%)
1638 manifests: 1/4 chunks (25.00%)
1639 manifests: 2/4 chunks (50.00%)
1639 manifests: 2/4 chunks (50.00%)
1640 manifests: 3/4 chunks (75.00%)
1640 manifests: 3/4 chunks (75.00%)
1641 manifests: 4/4 chunks (100.00%)
1641 manifests: 4/4 chunks (100.00%)
1642 adding file changes
1642 adding file changes
1643 adding abc.txt revisions
1643 adding abc.txt revisions
1644 files: 1/4 chunks (25.00%)
1644 files: 1/4 chunks (25.00%)
1645 adding foo/Bar/file.txt revisions
1645 adding foo/Bar/file.txt revisions
1646 files: 2/4 chunks (50.00%)
1646 files: 2/4 chunks (50.00%)
1647 adding foo/file.txt revisions
1647 adding foo/file.txt revisions
1648 files: 3/4 chunks (75.00%)
1648 files: 3/4 chunks (75.00%)
1649 adding quux/file.py revisions
1649 adding quux/file.py revisions
1650 files: 4/4 chunks (100.00%)
1650 files: 4/4 chunks (100.00%)
1651 added 4 changesets with 4 changes to 4 files (+1 heads)
1651 added 4 changesets with 4 changes to 4 files (+1 heads)
1652 calling hook pretxnchangegroup.acl: hgext.acl.hook
1652 calling hook pretxnchangegroup.acl: hgext.acl.hook
1653 acl: checking access for user "astro"
1653 acl: checking access for user "astro"
1654 acl: acl.allow.branches enabled, 0 entries for user astro
1654 acl: acl.allow.branches enabled, 0 entries for user astro
1655 acl: acl.deny.branches not enabled
1655 acl: acl.deny.branches not enabled
1656 acl: acl.allow not enabled
1656 acl: acl.allow not enabled
1657 acl: acl.deny not enabled
1657 acl: acl.deny not enabled
1658 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1658 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1659 transaction abort!
1659 transaction abort!
1660 rollback completed
1660 rollback completed
1661 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1661 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1662 no rollback information available
1662 no rollback information available
1663 2:fb35475503ef
1663 2:fb35475503ef
1664
1664
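The branch tests above follow the same pattern as the path checks: [acl.deny.branches] and [acl.allow.branches] map a branch name (or "*" for any branch) to the users the rule covers, branch checks run before path checks, and once an allow section exists at all, a changeset whose branch is not explicitly allowed for the pushing user is rejected, which is why the empty [acl.allow.branches] section blocks astro even on the default branch. A minimal sketch combining the two styles (illustrative; the entries mirror the transcript above):

  [acl.deny.branches]
  # nobody may push to foobar
  foobar = *
  [acl.allow.branches]
  # with an allow section present, only listed users get through;
  # here george may push to any branch
  * = george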
1665
1665
1666 Branch acl allow other
1666 Branch acl allow other
1667
1667
1668 $ init_config
1668 $ init_config
1669 $ echo "[acl.allow.branches]" >> $config
1669 $ echo "[acl.allow.branches]" >> $config
1670 $ echo "* = george" >> $config
1670 $ echo "* = george" >> $config
1671 $ do_push astro
1671 $ do_push astro
1672 Pushing as user astro
1672 Pushing as user astro
1673 hgrc = """
1673 hgrc = """
1674 [acl]
1674 [acl]
1675 sources = push
1675 sources = push
1676 [extensions]
1676 [extensions]
1677 [acl.allow.branches]
1677 [acl.allow.branches]
1678 * = george
1678 * = george
1679 """
1679 """
1680 pushing to ../b
1680 pushing to ../b
1681 query 1; heads
1681 query 1; heads
1682 searching for changes
1682 searching for changes
1683 all remote heads known locally
1683 all remote heads known locally
1684 listing keys for "bookmarks"
1684 listing keys for "bookmarks"
1685 4 changesets found
1685 4 changesets found
1686 list of changesets:
1686 list of changesets:
1687 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1687 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1688 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1688 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1689 911600dab2ae7a9baff75958b84fe606851ce955
1689 911600dab2ae7a9baff75958b84fe606851ce955
1690 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1690 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1691 adding changesets
1691 adding changesets
1692 bundling: 1/4 changesets (25.00%)
1692 bundling: 1/4 changesets (25.00%)
1693 bundling: 2/4 changesets (50.00%)
1693 bundling: 2/4 changesets (50.00%)
1694 bundling: 3/4 changesets (75.00%)
1694 bundling: 3/4 changesets (75.00%)
1695 bundling: 4/4 changesets (100.00%)
1695 bundling: 4/4 changesets (100.00%)
1696 bundling: 1/4 manifests (25.00%)
1696 bundling: 1/4 manifests (25.00%)
1697 bundling: 2/4 manifests (50.00%)
1697 bundling: 2/4 manifests (50.00%)
1698 bundling: 3/4 manifests (75.00%)
1698 bundling: 3/4 manifests (75.00%)
1699 bundling: 4/4 manifests (100.00%)
1699 bundling: 4/4 manifests (100.00%)
1700 bundling: abc.txt 1/4 files (25.00%)
1700 bundling: abc.txt 1/4 files (25.00%)
1701 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1701 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1702 bundling: foo/file.txt 3/4 files (75.00%)
1702 bundling: foo/file.txt 3/4 files (75.00%)
1703 bundling: quux/file.py 4/4 files (100.00%)
1703 bundling: quux/file.py 4/4 files (100.00%)
1704 changesets: 1 chunks
1704 changesets: 1 chunks
1705 add changeset ef1ea85a6374
1705 add changeset ef1ea85a6374
1706 changesets: 2 chunks
1706 changesets: 2 chunks
1707 add changeset f9cafe1212c8
1707 add changeset f9cafe1212c8
1708 changesets: 3 chunks
1708 changesets: 3 chunks
1709 add changeset 911600dab2ae
1709 add changeset 911600dab2ae
1710 changesets: 4 chunks
1710 changesets: 4 chunks
1711 add changeset e8fc755d4d82
1711 add changeset e8fc755d4d82
1712 adding manifests
1712 adding manifests
1713 manifests: 1/4 chunks (25.00%)
1713 manifests: 1/4 chunks (25.00%)
1714 manifests: 2/4 chunks (50.00%)
1714 manifests: 2/4 chunks (50.00%)
1715 manifests: 3/4 chunks (75.00%)
1715 manifests: 3/4 chunks (75.00%)
1716 manifests: 4/4 chunks (100.00%)
1716 manifests: 4/4 chunks (100.00%)
1717 adding file changes
1717 adding file changes
1718 adding abc.txt revisions
1718 adding abc.txt revisions
1719 files: 1/4 chunks (25.00%)
1719 files: 1/4 chunks (25.00%)
1720 adding foo/Bar/file.txt revisions
1720 adding foo/Bar/file.txt revisions
1721 files: 2/4 chunks (50.00%)
1721 files: 2/4 chunks (50.00%)
1722 adding foo/file.txt revisions
1722 adding foo/file.txt revisions
1723 files: 3/4 chunks (75.00%)
1723 files: 3/4 chunks (75.00%)
1724 adding quux/file.py revisions
1724 adding quux/file.py revisions
1725 files: 4/4 chunks (100.00%)
1725 files: 4/4 chunks (100.00%)
1726 added 4 changesets with 4 changes to 4 files (+1 heads)
1726 added 4 changesets with 4 changes to 4 files (+1 heads)
1727 calling hook pretxnchangegroup.acl: hgext.acl.hook
1727 calling hook pretxnchangegroup.acl: hgext.acl.hook
1728 acl: checking access for user "astro"
1728 acl: checking access for user "astro"
1729 acl: acl.allow.branches enabled, 0 entries for user astro
1729 acl: acl.allow.branches enabled, 0 entries for user astro
1730 acl: acl.deny.branches not enabled
1730 acl: acl.deny.branches not enabled
1731 acl: acl.allow not enabled
1731 acl: acl.allow not enabled
1732 acl: acl.deny not enabled
1732 acl: acl.deny not enabled
1733 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1733 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1734 transaction abort!
1734 transaction abort!
1735 rollback completed
1735 rollback completed
1736 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1736 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1737 no rollback information available
1737 no rollback information available
1738 2:fb35475503ef
1738 2:fb35475503ef
1739
1739
1740 $ do_push george
1740 $ do_push george
1741 Pushing as user george
1741 Pushing as user george
1742 hgrc = """
1742 hgrc = """
1743 [acl]
1743 [acl]
1744 sources = push
1744 sources = push
1745 [extensions]
1745 [extensions]
1746 [acl.allow.branches]
1746 [acl.allow.branches]
1747 * = george
1747 * = george
1748 """
1748 """
1749 pushing to ../b
1749 pushing to ../b
1750 query 1; heads
1750 query 1; heads
1751 searching for changes
1751 searching for changes
1752 all remote heads known locally
1752 all remote heads known locally
1753 listing keys for "bookmarks"
1753 listing keys for "bookmarks"
1754 4 changesets found
1754 4 changesets found
1755 list of changesets:
1755 list of changesets:
1756 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1756 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1757 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1757 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1758 911600dab2ae7a9baff75958b84fe606851ce955
1758 911600dab2ae7a9baff75958b84fe606851ce955
1759 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1759 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1760 adding changesets
1760 adding changesets
1761 bundling: 1/4 changesets (25.00%)
1761 bundling: 1/4 changesets (25.00%)
1762 bundling: 2/4 changesets (50.00%)
1762 bundling: 2/4 changesets (50.00%)
1763 bundling: 3/4 changesets (75.00%)
1763 bundling: 3/4 changesets (75.00%)
1764 bundling: 4/4 changesets (100.00%)
1764 bundling: 4/4 changesets (100.00%)
1765 bundling: 1/4 manifests (25.00%)
1765 bundling: 1/4 manifests (25.00%)
1766 bundling: 2/4 manifests (50.00%)
1766 bundling: 2/4 manifests (50.00%)
1767 bundling: 3/4 manifests (75.00%)
1767 bundling: 3/4 manifests (75.00%)
1768 bundling: 4/4 manifests (100.00%)
1768 bundling: 4/4 manifests (100.00%)
1769 bundling: abc.txt 1/4 files (25.00%)
1769 bundling: abc.txt 1/4 files (25.00%)
1770 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1770 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1771 bundling: foo/file.txt 3/4 files (75.00%)
1771 bundling: foo/file.txt 3/4 files (75.00%)
1772 bundling: quux/file.py 4/4 files (100.00%)
1772 bundling: quux/file.py 4/4 files (100.00%)
1773 changesets: 1 chunks
1773 changesets: 1 chunks
1774 add changeset ef1ea85a6374
1774 add changeset ef1ea85a6374
1775 changesets: 2 chunks
1775 changesets: 2 chunks
1776 add changeset f9cafe1212c8
1776 add changeset f9cafe1212c8
1777 changesets: 3 chunks
1777 changesets: 3 chunks
1778 add changeset 911600dab2ae
1778 add changeset 911600dab2ae
1779 changesets: 4 chunks
1779 changesets: 4 chunks
1780 add changeset e8fc755d4d82
1780 add changeset e8fc755d4d82
1781 adding manifests
1781 adding manifests
1782 manifests: 1/4 chunks (25.00%)
1782 manifests: 1/4 chunks (25.00%)
1783 manifests: 2/4 chunks (50.00%)
1783 manifests: 2/4 chunks (50.00%)
1784 manifests: 3/4 chunks (75.00%)
1784 manifests: 3/4 chunks (75.00%)
1785 manifests: 4/4 chunks (100.00%)
1785 manifests: 4/4 chunks (100.00%)
1786 adding file changes
1786 adding file changes
1787 adding abc.txt revisions
1787 adding abc.txt revisions
1788 files: 1/4 chunks (25.00%)
1788 files: 1/4 chunks (25.00%)
1789 adding foo/Bar/file.txt revisions
1789 adding foo/Bar/file.txt revisions
1790 files: 2/4 chunks (50.00%)
1790 files: 2/4 chunks (50.00%)
1791 adding foo/file.txt revisions
1791 adding foo/file.txt revisions
1792 files: 3/4 chunks (75.00%)
1792 files: 3/4 chunks (75.00%)
1793 adding quux/file.py revisions
1793 adding quux/file.py revisions
1794 files: 4/4 chunks (100.00%)
1794 files: 4/4 chunks (100.00%)
1795 added 4 changesets with 4 changes to 4 files (+1 heads)
1795 added 4 changesets with 4 changes to 4 files (+1 heads)
1796 calling hook pretxnchangegroup.acl: hgext.acl.hook
1796 calling hook pretxnchangegroup.acl: hgext.acl.hook
1797 acl: checking access for user "george"
1797 acl: checking access for user "george"
1798 acl: acl.allow.branches enabled, 1 entries for user george
1798 acl: acl.allow.branches enabled, 1 entries for user george
1799 acl: acl.deny.branches not enabled
1799 acl: acl.deny.branches not enabled
1800 acl: acl.allow not enabled
1800 acl: acl.allow not enabled
1801 acl: acl.deny not enabled
1801 acl: acl.deny not enabled
1802 acl: branch access granted: "ef1ea85a6374" on branch "default"
1802 acl: branch access granted: "ef1ea85a6374" on branch "default"
1803 acl: path access granted: "ef1ea85a6374"
1803 acl: path access granted: "ef1ea85a6374"
1804 acl: branch access granted: "f9cafe1212c8" on branch "default"
1804 acl: branch access granted: "f9cafe1212c8" on branch "default"
1805 acl: path access granted: "f9cafe1212c8"
1805 acl: path access granted: "f9cafe1212c8"
1806 acl: branch access granted: "911600dab2ae" on branch "default"
1806 acl: branch access granted: "911600dab2ae" on branch "default"
1807 acl: path access granted: "911600dab2ae"
1807 acl: path access granted: "911600dab2ae"
1808 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1808 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1809 acl: path access granted: "e8fc755d4d82"
1809 acl: path access granted: "e8fc755d4d82"
1810 updating the branch cache
1810 listing keys for "phases"
1811 listing keys for "phases"
1811 try to push obsolete markers to remote
1812 try to push obsolete markers to remote
1812 updating the branch cache
1813 checking for updated bookmarks
1813 checking for updated bookmarks
1814 listing keys for "bookmarks"
1814 listing keys for "bookmarks"
1815 repository tip rolled back to revision 2 (undo push)
1815 repository tip rolled back to revision 2 (undo push)
1816 2:fb35475503ef
1816 2:fb35475503ef
1817
1817
1818
1818
1819 Branch acl conflicting allow
1819 Branch acl conflicting allow
1820 the asterisk ends up applying to all branches, allowing george to
1820 the asterisk ends up applying to all branches, allowing george to
1821 push foobar into the remote
1821 push foobar into the remote
1822
1822
1823 $ init_config
1823 $ init_config
1824 $ echo "[acl.allow.branches]" >> $config
1824 $ echo "[acl.allow.branches]" >> $config
1825 $ echo "foobar = astro" >> $config
1825 $ echo "foobar = astro" >> $config
1826 $ echo "* = george" >> $config
1826 $ echo "* = george" >> $config
1827 $ do_push george
1827 $ do_push george
1828 Pushing as user george
1828 Pushing as user george
1829 hgrc = """
1829 hgrc = """
1830 [acl]
1830 [acl]
1831 sources = push
1831 sources = push
1832 [extensions]
1832 [extensions]
1833 [acl.allow.branches]
1833 [acl.allow.branches]
1834 foobar = astro
1834 foobar = astro
1835 * = george
1835 * = george
1836 """
1836 """
1837 pushing to ../b
1837 pushing to ../b
1838 query 1; heads
1838 query 1; heads
1839 searching for changes
1839 searching for changes
1840 all remote heads known locally
1840 all remote heads known locally
1841 listing keys for "bookmarks"
1841 listing keys for "bookmarks"
1842 4 changesets found
1842 4 changesets found
1843 list of changesets:
1843 list of changesets:
1844 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1844 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1845 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1845 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1846 911600dab2ae7a9baff75958b84fe606851ce955
1846 911600dab2ae7a9baff75958b84fe606851ce955
1847 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1847 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1848 adding changesets
1848 adding changesets
1849 bundling: 1/4 changesets (25.00%)
1849 bundling: 1/4 changesets (25.00%)
1850 bundling: 2/4 changesets (50.00%)
1850 bundling: 2/4 changesets (50.00%)
1851 bundling: 3/4 changesets (75.00%)
1851 bundling: 3/4 changesets (75.00%)
1852 bundling: 4/4 changesets (100.00%)
1852 bundling: 4/4 changesets (100.00%)
1853 bundling: 1/4 manifests (25.00%)
1853 bundling: 1/4 manifests (25.00%)
1854 bundling: 2/4 manifests (50.00%)
1854 bundling: 2/4 manifests (50.00%)
1855 bundling: 3/4 manifests (75.00%)
1855 bundling: 3/4 manifests (75.00%)
1856 bundling: 4/4 manifests (100.00%)
1856 bundling: 4/4 manifests (100.00%)
1857 bundling: abc.txt 1/4 files (25.00%)
bundling: foo/Bar/file.txt 2/4 files (50.00%)
bundling: foo/file.txt 3/4 files (75.00%)
bundling: quux/file.py 4/4 files (100.00%)
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
add changeset f9cafe1212c8
changesets: 3 chunks
add changeset 911600dab2ae
changesets: 4 chunks
add changeset e8fc755d4d82
adding manifests
manifests: 1/4 chunks (25.00%)
manifests: 2/4 chunks (50.00%)
manifests: 3/4 chunks (75.00%)
manifests: 4/4 chunks (100.00%)
adding file changes
adding abc.txt revisions
files: 1/4 chunks (25.00%)
adding foo/Bar/file.txt revisions
files: 2/4 chunks (50.00%)
adding foo/file.txt revisions
files: 3/4 chunks (75.00%)
adding quux/file.py revisions
files: 4/4 chunks (100.00%)
added 4 changesets with 4 changes to 4 files (+1 heads)
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: checking access for user "george"
acl: acl.allow.branches enabled, 1 entries for user george
acl: acl.deny.branches not enabled
acl: acl.allow not enabled
acl: acl.deny not enabled
acl: branch access granted: "ef1ea85a6374" on branch "default"
acl: path access granted: "ef1ea85a6374"
acl: branch access granted: "f9cafe1212c8" on branch "default"
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
-updating the branch cache
listing keys for "phases"
try to push obsolete markers to remote
+updating the branch cache
checking for updated bookmarks
listing keys for "bookmarks"
repository tip rolled back to revision 2 (undo push)
2:fb35475503ef

Branch acl conflicting deny

$ init_config
$ echo "[acl.deny.branches]" >> $config
$ echo "foobar = astro" >> $config
$ echo "default = astro" >> $config
$ echo "* = george" >> $config
$ do_push george
Pushing as user george
hgrc = """
[acl]
sources = push
[extensions]
[acl.deny.branches]
foobar = astro
default = astro
* = george
"""
pushing to ../b
query 1; heads
searching for changes
all remote heads known locally
listing keys for "bookmarks"
4 changesets found
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
adding changesets
bundling: 1/4 changesets (25.00%)
bundling: 2/4 changesets (50.00%)
bundling: 3/4 changesets (75.00%)
bundling: 4/4 changesets (100.00%)
bundling: 1/4 manifests (25.00%)
bundling: 2/4 manifests (50.00%)
bundling: 3/4 manifests (75.00%)
bundling: 4/4 manifests (100.00%)
bundling: abc.txt 1/4 files (25.00%)
bundling: foo/Bar/file.txt 2/4 files (50.00%)
bundling: foo/file.txt 3/4 files (75.00%)
bundling: quux/file.py 4/4 files (100.00%)
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
add changeset f9cafe1212c8
changesets: 3 chunks
add changeset 911600dab2ae
changesets: 4 chunks
add changeset e8fc755d4d82
adding manifests
manifests: 1/4 chunks (25.00%)
manifests: 2/4 chunks (50.00%)
manifests: 3/4 chunks (75.00%)
manifests: 4/4 chunks (100.00%)
adding file changes
adding abc.txt revisions
files: 1/4 chunks (25.00%)
adding foo/Bar/file.txt revisions
files: 2/4 chunks (50.00%)
adding foo/file.txt revisions
files: 3/4 chunks (75.00%)
adding quux/file.py revisions
files: 4/4 chunks (100.00%)
added 4 changesets with 4 changes to 4 files (+1 heads)
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: checking access for user "george"
acl: acl.allow.branches not enabled
acl: acl.deny.branches enabled, 1 entries for user george
acl: acl.allow not enabled
acl: acl.deny not enabled
error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
transaction abort!
rollback completed
abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
no rollback information available
2:fb35475503ef

User 'astro' must not be denied

$ init_config
$ echo "[acl.deny.branches]" >> $config
$ echo "default = !astro" >> $config
$ do_push astro
Pushing as user astro
hgrc = """
[acl]
sources = push
[extensions]
[acl.deny.branches]
default = !astro
"""
pushing to ../b
query 1; heads
searching for changes
all remote heads known locally
listing keys for "bookmarks"
4 changesets found
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
adding changesets
bundling: 1/4 changesets (25.00%)
bundling: 2/4 changesets (50.00%)
bundling: 3/4 changesets (75.00%)
bundling: 4/4 changesets (100.00%)
bundling: 1/4 manifests (25.00%)
bundling: 2/4 manifests (50.00%)
bundling: 3/4 manifests (75.00%)
bundling: 4/4 manifests (100.00%)
bundling: abc.txt 1/4 files (25.00%)
bundling: foo/Bar/file.txt 2/4 files (50.00%)
bundling: foo/file.txt 3/4 files (75.00%)
bundling: quux/file.py 4/4 files (100.00%)
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
add changeset f9cafe1212c8
changesets: 3 chunks
add changeset 911600dab2ae
changesets: 4 chunks
add changeset e8fc755d4d82
adding manifests
manifests: 1/4 chunks (25.00%)
manifests: 2/4 chunks (50.00%)
manifests: 3/4 chunks (75.00%)
manifests: 4/4 chunks (100.00%)
adding file changes
adding abc.txt revisions
files: 1/4 chunks (25.00%)
adding foo/Bar/file.txt revisions
files: 2/4 chunks (50.00%)
adding foo/file.txt revisions
files: 3/4 chunks (75.00%)
adding quux/file.py revisions
files: 4/4 chunks (100.00%)
added 4 changesets with 4 changes to 4 files (+1 heads)
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: checking access for user "astro"
acl: acl.allow.branches not enabled
acl: acl.deny.branches enabled, 0 entries for user astro
acl: acl.allow not enabled
acl: acl.deny not enabled
acl: branch access granted: "ef1ea85a6374" on branch "default"
acl: path access granted: "ef1ea85a6374"
acl: branch access granted: "f9cafe1212c8" on branch "default"
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
-updating the branch cache
listing keys for "phases"
try to push obsolete markers to remote
+updating the branch cache
checking for updated bookmarks
listing keys for "bookmarks"
repository tip rolled back to revision 2 (undo push)
2:fb35475503ef


Non-astro users must be denied

$ do_push george
Pushing as user george
hgrc = """
[acl]
sources = push
[extensions]
[acl.deny.branches]
default = !astro
"""
pushing to ../b
query 1; heads
searching for changes
all remote heads known locally
listing keys for "bookmarks"
4 changesets found
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
adding changesets
bundling: 1/4 changesets (25.00%)
bundling: 2/4 changesets (50.00%)
bundling: 3/4 changesets (75.00%)
bundling: 4/4 changesets (100.00%)
bundling: 1/4 manifests (25.00%)
bundling: 2/4 manifests (50.00%)
bundling: 3/4 manifests (75.00%)
bundling: 4/4 manifests (100.00%)
bundling: abc.txt 1/4 files (25.00%)
bundling: foo/Bar/file.txt 2/4 files (50.00%)
bundling: foo/file.txt 3/4 files (75.00%)
bundling: quux/file.py 4/4 files (100.00%)
changesets: 1 chunks
add changeset ef1ea85a6374
changesets: 2 chunks
add changeset f9cafe1212c8
changesets: 3 chunks
add changeset 911600dab2ae
changesets: 4 chunks
add changeset e8fc755d4d82
adding manifests
manifests: 1/4 chunks (25.00%)
manifests: 2/4 chunks (50.00%)
manifests: 3/4 chunks (75.00%)
manifests: 4/4 chunks (100.00%)
adding file changes
adding abc.txt revisions
files: 1/4 chunks (25.00%)
adding foo/Bar/file.txt revisions
files: 2/4 chunks (50.00%)
adding foo/file.txt revisions
files: 3/4 chunks (75.00%)
adding quux/file.py revisions
files: 4/4 chunks (100.00%)
added 4 changesets with 4 changes to 4 files (+1 heads)
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: checking access for user "george"
acl: acl.allow.branches not enabled
acl: acl.deny.branches enabled, 1 entries for user george
acl: acl.allow not enabled
acl: acl.deny not enabled
error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
transaction abort!
rollback completed
abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
no rollback information available
2:fb35475503ef


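Taken together, these cases exercise the branch-level ACL syntax: entries in [acl.allow.branches] and [acl.deny.branches] map a branch name (or "*" for any branch) to a list of users, and a leading "!" negates a user name so the rule applies to everyone except that user. As a rough sketch only (not part of the change under review), an hgrc wiring such a policy up by hand could look like the following; the [hooks] line mirrors the "calling hook pretxnchangegroup.acl: hgext.acl.hook" output above, and the branch and user names are simply the examples used in these tests:

[hooks]
pretxnchangegroup.acl = python:hgext.acl.hook
[acl]
sources = push
[acl.allow.branches]
foobar = astro
[acl.deny.branches]
default = !astro
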
@@ -1,41 +1,42 @@
#if unix-permissions no-root no-windows

Prepare

$ hg init a
$ echo a > a/a
$ hg -R a ci -A -m a
adding a

$ hg clone a b
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved

One process waiting for another

$ cat > hooks.py << EOF
> import time
> def sleepone(**x): time.sleep(1)
> def sleephalf(**x): time.sleep(0.5)
> EOF
$ echo b > b/b
$ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
$ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf"
waiting for lock on working directory of b held by '*:*' (glob)
got lock after ? seconds (glob)
warning: ignoring unknown working parent d2ae7f538514!
$ wait
$ cat stdout
adding b

Pushing to a local read-only repo that can't be locked

$ chmod 100 a/.hg/store

$ hg -R b push a
pushing to a
+searching for changes
abort: could not lock repository a: Permission denied
[255]

$ chmod 700 a/.hg/store
#endif
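For reference, the inline --config hook definitions used in this test could equally be made persistent in the repository's .hg/hgrc. A rough equivalent is sketched below, with /path/to standing in for the `pwd` that the test expands at run time:

[hooks]
precommit = python:/path/to/hooks.py:sleepone
pre-update = python:/path/to/hooks.py:sleephalf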