##// END OF EJS Templates
push: catch and process PushkeyFailed error...
Pierre-Yves David -
r25485:8182163a default
parent child Browse files
Show More
@@ -1,1547 +1,1556
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import time
8 import time
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid
10 from node import hex, nullid
11 import errno, urllib
11 import errno, urllib
12 import util, scmutil, changegroup, base85, error, store
12 import util, scmutil, changegroup, base85, error, store
13 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
14 import lock as lockmod
14 import lock as lockmod
15 import tags
15 import tags
16
16
17 def readbundle(ui, fh, fname, vfs=None):
17 def readbundle(ui, fh, fname, vfs=None):
18 header = changegroup.readexactly(fh, 4)
18 header = changegroup.readexactly(fh, 4)
19
19
20 alg = None
20 alg = None
21 if not fname:
21 if not fname:
22 fname = "stream"
22 fname = "stream"
23 if not header.startswith('HG') and header.startswith('\0'):
23 if not header.startswith('HG') and header.startswith('\0'):
24 fh = changegroup.headerlessfixup(fh, header)
24 fh = changegroup.headerlessfixup(fh, header)
25 header = "HG10"
25 header = "HG10"
26 alg = 'UN'
26 alg = 'UN'
27 elif vfs:
27 elif vfs:
28 fname = vfs.join(fname)
28 fname = vfs.join(fname)
29
29
30 magic, version = header[0:2], header[2:4]
30 magic, version = header[0:2], header[2:4]
31
31
32 if magic != 'HG':
32 if magic != 'HG':
33 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
33 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
34 if version == '10':
34 if version == '10':
35 if alg is None:
35 if alg is None:
36 alg = changegroup.readexactly(fh, 2)
36 alg = changegroup.readexactly(fh, 2)
37 return changegroup.cg1unpacker(fh, alg)
37 return changegroup.cg1unpacker(fh, alg)
38 elif version.startswith('2'):
38 elif version.startswith('2'):
39 return bundle2.getunbundler(ui, fh, header=magic + version)
39 return bundle2.getunbundler(ui, fh, header=magic + version)
40 else:
40 else:
41 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
41 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
42
42
43 def buildobsmarkerspart(bundler, markers):
43 def buildobsmarkerspart(bundler, markers):
44 """add an obsmarker part to the bundler with <markers>
44 """add an obsmarker part to the bundler with <markers>
45
45
46 No part is created if markers is empty.
46 No part is created if markers is empty.
47 Raises ValueError if the bundler doesn't support any known obsmarker format.
47 Raises ValueError if the bundler doesn't support any known obsmarker format.
48 """
48 """
49 if markers:
49 if markers:
50 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
50 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
51 version = obsolete.commonversion(remoteversions)
51 version = obsolete.commonversion(remoteversions)
52 if version is None:
52 if version is None:
53 raise ValueError('bundler do not support common obsmarker format')
53 raise ValueError('bundler do not support common obsmarker format')
54 stream = obsolete.encodemarkers(markers, True, version=version)
54 stream = obsolete.encodemarkers(markers, True, version=version)
55 return bundler.newpart('obsmarkers', data=stream)
55 return bundler.newpart('obsmarkers', data=stream)
56 return None
56 return None
57
57
58 def _canusebundle2(op):
58 def _canusebundle2(op):
59 """return true if a pull/push can use bundle2
59 """return true if a pull/push can use bundle2
60
60
61 Feel free to nuke this function when we drop the experimental option"""
61 Feel free to nuke this function when we drop the experimental option"""
62 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
62 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
63 and op.remote.capable('bundle2'))
63 and op.remote.capable('bundle2'))
64
64
65
65
66 class pushoperation(object):
66 class pushoperation(object):
67 """A object that represent a single push operation
67 """A object that represent a single push operation
68
68
69 It purpose is to carry push related state and very common operation.
69 It purpose is to carry push related state and very common operation.
70
70
71 A new should be created at the beginning of each push and discarded
71 A new should be created at the beginning of each push and discarded
72 afterward.
72 afterward.
73 """
73 """
74
74
75 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
75 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
76 bookmarks=()):
76 bookmarks=()):
77 # repo we push from
77 # repo we push from
78 self.repo = repo
78 self.repo = repo
79 self.ui = repo.ui
79 self.ui = repo.ui
80 # repo we push to
80 # repo we push to
81 self.remote = remote
81 self.remote = remote
82 # force option provided
82 # force option provided
83 self.force = force
83 self.force = force
84 # revs to be pushed (None is "all")
84 # revs to be pushed (None is "all")
85 self.revs = revs
85 self.revs = revs
86 # bookmark explicitly pushed
86 # bookmark explicitly pushed
87 self.bookmarks = bookmarks
87 self.bookmarks = bookmarks
88 # allow push of new branch
88 # allow push of new branch
89 self.newbranch = newbranch
89 self.newbranch = newbranch
90 # did a local lock get acquired?
90 # did a local lock get acquired?
91 self.locallocked = None
91 self.locallocked = None
92 # step already performed
92 # step already performed
93 # (used to check what steps have been already performed through bundle2)
93 # (used to check what steps have been already performed through bundle2)
94 self.stepsdone = set()
94 self.stepsdone = set()
95 # Integer version of the changegroup push result
95 # Integer version of the changegroup push result
96 # - None means nothing to push
96 # - None means nothing to push
97 # - 0 means HTTP error
97 # - 0 means HTTP error
98 # - 1 means we pushed and remote head count is unchanged *or*
98 # - 1 means we pushed and remote head count is unchanged *or*
99 # we have outgoing changesets but refused to push
99 # we have outgoing changesets but refused to push
100 # - other values as described by addchangegroup()
100 # - other values as described by addchangegroup()
101 self.cgresult = None
101 self.cgresult = None
102 # Boolean value for the bookmark push
102 # Boolean value for the bookmark push
103 self.bkresult = None
103 self.bkresult = None
104 # discover.outgoing object (contains common and outgoing data)
104 # discover.outgoing object (contains common and outgoing data)
105 self.outgoing = None
105 self.outgoing = None
106 # all remote heads before the push
106 # all remote heads before the push
107 self.remoteheads = None
107 self.remoteheads = None
108 # testable as a boolean indicating if any nodes are missing locally.
108 # testable as a boolean indicating if any nodes are missing locally.
109 self.incoming = None
109 self.incoming = None
110 # phases changes that must be pushed along side the changesets
110 # phases changes that must be pushed along side the changesets
111 self.outdatedphases = None
111 self.outdatedphases = None
112 # phases changes that must be pushed if changeset push fails
112 # phases changes that must be pushed if changeset push fails
113 self.fallbackoutdatedphases = None
113 self.fallbackoutdatedphases = None
114 # outgoing obsmarkers
114 # outgoing obsmarkers
115 self.outobsmarkers = set()
115 self.outobsmarkers = set()
116 # outgoing bookmarks
116 # outgoing bookmarks
117 self.outbookmarks = []
117 self.outbookmarks = []
118 # transaction manager
118 # transaction manager
119 self.trmanager = None
119 self.trmanager = None
120 # map { pushkey partid -> callback handling failure}
121 # used to handle exception from mandatory pushkey part failure
122 self.pkfailcb = {}
120
123
121 @util.propertycache
124 @util.propertycache
122 def futureheads(self):
125 def futureheads(self):
123 """future remote heads if the changeset push succeeds"""
126 """future remote heads if the changeset push succeeds"""
124 return self.outgoing.missingheads
127 return self.outgoing.missingheads
125
128
126 @util.propertycache
129 @util.propertycache
127 def fallbackheads(self):
130 def fallbackheads(self):
128 """future remote heads if the changeset push fails"""
131 """future remote heads if the changeset push fails"""
129 if self.revs is None:
132 if self.revs is None:
130 # not target to push, all common are relevant
133 # not target to push, all common are relevant
131 return self.outgoing.commonheads
134 return self.outgoing.commonheads
132 unfi = self.repo.unfiltered()
135 unfi = self.repo.unfiltered()
133 # I want cheads = heads(::missingheads and ::commonheads)
136 # I want cheads = heads(::missingheads and ::commonheads)
134 # (missingheads is revs with secret changeset filtered out)
137 # (missingheads is revs with secret changeset filtered out)
135 #
138 #
136 # This can be expressed as:
139 # This can be expressed as:
137 # cheads = ( (missingheads and ::commonheads)
140 # cheads = ( (missingheads and ::commonheads)
138 # + (commonheads and ::missingheads))"
141 # + (commonheads and ::missingheads))"
139 # )
142 # )
140 #
143 #
141 # while trying to push we already computed the following:
144 # while trying to push we already computed the following:
142 # common = (::commonheads)
145 # common = (::commonheads)
143 # missing = ((commonheads::missingheads) - commonheads)
146 # missing = ((commonheads::missingheads) - commonheads)
144 #
147 #
145 # We can pick:
148 # We can pick:
146 # * missingheads part of common (::commonheads)
149 # * missingheads part of common (::commonheads)
147 common = set(self.outgoing.common)
150 common = set(self.outgoing.common)
148 nm = self.repo.changelog.nodemap
151 nm = self.repo.changelog.nodemap
149 cheads = [node for node in self.revs if nm[node] in common]
152 cheads = [node for node in self.revs if nm[node] in common]
150 # and
153 # and
151 # * commonheads parents on missing
154 # * commonheads parents on missing
152 revset = unfi.set('%ln and parents(roots(%ln))',
155 revset = unfi.set('%ln and parents(roots(%ln))',
153 self.outgoing.commonheads,
156 self.outgoing.commonheads,
154 self.outgoing.missing)
157 self.outgoing.missing)
155 cheads.extend(c.node() for c in revset)
158 cheads.extend(c.node() for c in revset)
156 return cheads
159 return cheads
157
160
158 @property
161 @property
159 def commonheads(self):
162 def commonheads(self):
160 """set of all common heads after changeset bundle push"""
163 """set of all common heads after changeset bundle push"""
161 if self.cgresult:
164 if self.cgresult:
162 return self.futureheads
165 return self.futureheads
163 else:
166 else:
164 return self.fallbackheads
167 return self.fallbackheads
165
168
166 # mapping of message used when pushing bookmark
169 # mapping of message used when pushing bookmark
167 bookmsgmap = {'update': (_("updating bookmark %s\n"),
170 bookmsgmap = {'update': (_("updating bookmark %s\n"),
168 _('updating bookmark %s failed!\n')),
171 _('updating bookmark %s failed!\n')),
169 'export': (_("exporting bookmark %s\n"),
172 'export': (_("exporting bookmark %s\n"),
170 _('exporting bookmark %s failed!\n')),
173 _('exporting bookmark %s failed!\n')),
171 'delete': (_("deleting remote bookmark %s\n"),
174 'delete': (_("deleting remote bookmark %s\n"),
172 _('deleting remote bookmark %s failed!\n')),
175 _('deleting remote bookmark %s failed!\n')),
173 }
176 }
174
177
175
178
176 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
179 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
177 '''Push outgoing changesets (limited by revs) from a local
180 '''Push outgoing changesets (limited by revs) from a local
178 repository to remote. Return an integer:
181 repository to remote. Return an integer:
179 - None means nothing to push
182 - None means nothing to push
180 - 0 means HTTP error
183 - 0 means HTTP error
181 - 1 means we pushed and remote head count is unchanged *or*
184 - 1 means we pushed and remote head count is unchanged *or*
182 we have outgoing changesets but refused to push
185 we have outgoing changesets but refused to push
183 - other values as described by addchangegroup()
186 - other values as described by addchangegroup()
184 '''
187 '''
185 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
188 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
186 if pushop.remote.local():
189 if pushop.remote.local():
187 missing = (set(pushop.repo.requirements)
190 missing = (set(pushop.repo.requirements)
188 - pushop.remote.local().supported)
191 - pushop.remote.local().supported)
189 if missing:
192 if missing:
190 msg = _("required features are not"
193 msg = _("required features are not"
191 " supported in the destination:"
194 " supported in the destination:"
192 " %s") % (', '.join(sorted(missing)))
195 " %s") % (', '.join(sorted(missing)))
193 raise util.Abort(msg)
196 raise util.Abort(msg)
194
197
195 # there are two ways to push to remote repo:
198 # there are two ways to push to remote repo:
196 #
199 #
197 # addchangegroup assumes local user can lock remote
200 # addchangegroup assumes local user can lock remote
198 # repo (local filesystem, old ssh servers).
201 # repo (local filesystem, old ssh servers).
199 #
202 #
200 # unbundle assumes local user cannot lock remote repo (new ssh
203 # unbundle assumes local user cannot lock remote repo (new ssh
201 # servers, http servers).
204 # servers, http servers).
202
205
203 if not pushop.remote.canpush():
206 if not pushop.remote.canpush():
204 raise util.Abort(_("destination does not support push"))
207 raise util.Abort(_("destination does not support push"))
205 # get local lock as we might write phase data
208 # get local lock as we might write phase data
206 localwlock = locallock = None
209 localwlock = locallock = None
207 try:
210 try:
208 # bundle2 push may receive a reply bundle touching bookmarks or other
211 # bundle2 push may receive a reply bundle touching bookmarks or other
209 # things requiring the wlock. Take it now to ensure proper ordering.
212 # things requiring the wlock. Take it now to ensure proper ordering.
210 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
213 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
211 if _canusebundle2(pushop) and maypushback:
214 if _canusebundle2(pushop) and maypushback:
212 localwlock = pushop.repo.wlock()
215 localwlock = pushop.repo.wlock()
213 locallock = pushop.repo.lock()
216 locallock = pushop.repo.lock()
214 pushop.locallocked = True
217 pushop.locallocked = True
215 except IOError, err:
218 except IOError, err:
216 pushop.locallocked = False
219 pushop.locallocked = False
217 if err.errno != errno.EACCES:
220 if err.errno != errno.EACCES:
218 raise
221 raise
219 # source repo cannot be locked.
222 # source repo cannot be locked.
220 # We do not abort the push, but just disable the local phase
223 # We do not abort the push, but just disable the local phase
221 # synchronisation.
224 # synchronisation.
222 msg = 'cannot lock source repository: %s\n' % err
225 msg = 'cannot lock source repository: %s\n' % err
223 pushop.ui.debug(msg)
226 pushop.ui.debug(msg)
224 try:
227 try:
225 if pushop.locallocked:
228 if pushop.locallocked:
226 pushop.trmanager = transactionmanager(repo,
229 pushop.trmanager = transactionmanager(repo,
227 'push-response',
230 'push-response',
228 pushop.remote.url())
231 pushop.remote.url())
229 pushop.repo.checkpush(pushop)
232 pushop.repo.checkpush(pushop)
230 lock = None
233 lock = None
231 unbundle = pushop.remote.capable('unbundle')
234 unbundle = pushop.remote.capable('unbundle')
232 if not unbundle:
235 if not unbundle:
233 lock = pushop.remote.lock()
236 lock = pushop.remote.lock()
234 try:
237 try:
235 _pushdiscovery(pushop)
238 _pushdiscovery(pushop)
236 if _canusebundle2(pushop):
239 if _canusebundle2(pushop):
237 _pushbundle2(pushop)
240 _pushbundle2(pushop)
238 _pushchangeset(pushop)
241 _pushchangeset(pushop)
239 _pushsyncphase(pushop)
242 _pushsyncphase(pushop)
240 _pushobsolete(pushop)
243 _pushobsolete(pushop)
241 _pushbookmark(pushop)
244 _pushbookmark(pushop)
242 finally:
245 finally:
243 if lock is not None:
246 if lock is not None:
244 lock.release()
247 lock.release()
245 if pushop.trmanager:
248 if pushop.trmanager:
246 pushop.trmanager.close()
249 pushop.trmanager.close()
247 finally:
250 finally:
248 if pushop.trmanager:
251 if pushop.trmanager:
249 pushop.trmanager.release()
252 pushop.trmanager.release()
250 if locallock is not None:
253 if locallock is not None:
251 locallock.release()
254 locallock.release()
252 if localwlock is not None:
255 if localwlock is not None:
253 localwlock.release()
256 localwlock.release()
254
257
255 return pushop
258 return pushop
256
259
257 # list of steps to perform discovery before push
260 # list of steps to perform discovery before push
258 pushdiscoveryorder = []
261 pushdiscoveryorder = []
259
262
260 # Mapping between step name and function
263 # Mapping between step name and function
261 #
264 #
262 # This exists to help extensions wrap steps if necessary
265 # This exists to help extensions wrap steps if necessary
263 pushdiscoverymapping = {}
266 pushdiscoverymapping = {}
264
267
265 def pushdiscovery(stepname):
268 def pushdiscovery(stepname):
266 """decorator for function performing discovery before push
269 """decorator for function performing discovery before push
267
270
268 The function is added to the step -> function mapping and appended to the
271 The function is added to the step -> function mapping and appended to the
269 list of steps. Beware that decorated function will be added in order (this
272 list of steps. Beware that decorated function will be added in order (this
270 may matter).
273 may matter).
271
274
272 You can only use this decorator for a new step, if you want to wrap a step
275 You can only use this decorator for a new step, if you want to wrap a step
273 from an extension, change the pushdiscovery dictionary directly."""
276 from an extension, change the pushdiscovery dictionary directly."""
274 def dec(func):
277 def dec(func):
275 assert stepname not in pushdiscoverymapping
278 assert stepname not in pushdiscoverymapping
276 pushdiscoverymapping[stepname] = func
279 pushdiscoverymapping[stepname] = func
277 pushdiscoveryorder.append(stepname)
280 pushdiscoveryorder.append(stepname)
278 return func
281 return func
279 return dec
282 return dec
280
283
281 def _pushdiscovery(pushop):
284 def _pushdiscovery(pushop):
282 """Run all discovery steps"""
285 """Run all discovery steps"""
283 for stepname in pushdiscoveryorder:
286 for stepname in pushdiscoveryorder:
284 step = pushdiscoverymapping[stepname]
287 step = pushdiscoverymapping[stepname]
285 step(pushop)
288 step(pushop)
286
289
287 @pushdiscovery('changeset')
290 @pushdiscovery('changeset')
288 def _pushdiscoverychangeset(pushop):
291 def _pushdiscoverychangeset(pushop):
289 """discover the changeset that need to be pushed"""
292 """discover the changeset that need to be pushed"""
290 fci = discovery.findcommonincoming
293 fci = discovery.findcommonincoming
291 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
294 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
292 common, inc, remoteheads = commoninc
295 common, inc, remoteheads = commoninc
293 fco = discovery.findcommonoutgoing
296 fco = discovery.findcommonoutgoing
294 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
297 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
295 commoninc=commoninc, force=pushop.force)
298 commoninc=commoninc, force=pushop.force)
296 pushop.outgoing = outgoing
299 pushop.outgoing = outgoing
297 pushop.remoteheads = remoteheads
300 pushop.remoteheads = remoteheads
298 pushop.incoming = inc
301 pushop.incoming = inc
299
302
300 @pushdiscovery('phase')
303 @pushdiscovery('phase')
301 def _pushdiscoveryphase(pushop):
304 def _pushdiscoveryphase(pushop):
302 """discover the phase that needs to be pushed
305 """discover the phase that needs to be pushed
303
306
304 (computed for both success and failure case for changesets push)"""
307 (computed for both success and failure case for changesets push)"""
305 outgoing = pushop.outgoing
308 outgoing = pushop.outgoing
306 unfi = pushop.repo.unfiltered()
309 unfi = pushop.repo.unfiltered()
307 remotephases = pushop.remote.listkeys('phases')
310 remotephases = pushop.remote.listkeys('phases')
308 publishing = remotephases.get('publishing', False)
311 publishing = remotephases.get('publishing', False)
309 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
312 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
310 and remotephases # server supports phases
313 and remotephases # server supports phases
311 and not pushop.outgoing.missing # no changesets to be pushed
314 and not pushop.outgoing.missing # no changesets to be pushed
312 and publishing):
315 and publishing):
313 # When:
316 # When:
314 # - this is a subrepo push
317 # - this is a subrepo push
315 # - and remote support phase
318 # - and remote support phase
316 # - and no changeset are to be pushed
319 # - and no changeset are to be pushed
317 # - and remote is publishing
320 # - and remote is publishing
318 # We may be in issue 3871 case!
321 # We may be in issue 3871 case!
319 # We drop the possible phase synchronisation done by
322 # We drop the possible phase synchronisation done by
320 # courtesy to publish changesets possibly locally draft
323 # courtesy to publish changesets possibly locally draft
321 # on the remote.
324 # on the remote.
322 remotephases = {'publishing': 'True'}
325 remotephases = {'publishing': 'True'}
323 ana = phases.analyzeremotephases(pushop.repo,
326 ana = phases.analyzeremotephases(pushop.repo,
324 pushop.fallbackheads,
327 pushop.fallbackheads,
325 remotephases)
328 remotephases)
326 pheads, droots = ana
329 pheads, droots = ana
327 extracond = ''
330 extracond = ''
328 if not publishing:
331 if not publishing:
329 extracond = ' and public()'
332 extracond = ' and public()'
330 revset = 'heads((%%ln::%%ln) %s)' % extracond
333 revset = 'heads((%%ln::%%ln) %s)' % extracond
331 # Get the list of all revs draft on remote by public here.
334 # Get the list of all revs draft on remote by public here.
332 # XXX Beware that revset break if droots is not strictly
335 # XXX Beware that revset break if droots is not strictly
333 # XXX root we may want to ensure it is but it is costly
336 # XXX root we may want to ensure it is but it is costly
334 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
337 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
335 if not outgoing.missing:
338 if not outgoing.missing:
336 future = fallback
339 future = fallback
337 else:
340 else:
338 # adds changeset we are going to push as draft
341 # adds changeset we are going to push as draft
339 #
342 #
340 # should not be necessary for publishing server, but because of an
343 # should not be necessary for publishing server, but because of an
341 # issue fixed in xxxxx we have to do it anyway.
344 # issue fixed in xxxxx we have to do it anyway.
342 fdroots = list(unfi.set('roots(%ln + %ln::)',
345 fdroots = list(unfi.set('roots(%ln + %ln::)',
343 outgoing.missing, droots))
346 outgoing.missing, droots))
344 fdroots = [f.node() for f in fdroots]
347 fdroots = [f.node() for f in fdroots]
345 future = list(unfi.set(revset, fdroots, pushop.futureheads))
348 future = list(unfi.set(revset, fdroots, pushop.futureheads))
346 pushop.outdatedphases = future
349 pushop.outdatedphases = future
347 pushop.fallbackoutdatedphases = fallback
350 pushop.fallbackoutdatedphases = fallback
348
351
349 @pushdiscovery('obsmarker')
352 @pushdiscovery('obsmarker')
350 def _pushdiscoveryobsmarkers(pushop):
353 def _pushdiscoveryobsmarkers(pushop):
351 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
354 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
352 and pushop.repo.obsstore
355 and pushop.repo.obsstore
353 and 'obsolete' in pushop.remote.listkeys('namespaces')):
356 and 'obsolete' in pushop.remote.listkeys('namespaces')):
354 repo = pushop.repo
357 repo = pushop.repo
355 # very naive computation, that can be quite expensive on big repo.
358 # very naive computation, that can be quite expensive on big repo.
356 # However: evolution is currently slow on them anyway.
359 # However: evolution is currently slow on them anyway.
357 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
360 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
358 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
361 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
359
362
360 @pushdiscovery('bookmarks')
363 @pushdiscovery('bookmarks')
361 def _pushdiscoverybookmarks(pushop):
364 def _pushdiscoverybookmarks(pushop):
362 ui = pushop.ui
365 ui = pushop.ui
363 repo = pushop.repo.unfiltered()
366 repo = pushop.repo.unfiltered()
364 remote = pushop.remote
367 remote = pushop.remote
365 ui.debug("checking for updated bookmarks\n")
368 ui.debug("checking for updated bookmarks\n")
366 ancestors = ()
369 ancestors = ()
367 if pushop.revs:
370 if pushop.revs:
368 revnums = map(repo.changelog.rev, pushop.revs)
371 revnums = map(repo.changelog.rev, pushop.revs)
369 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
372 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
370 remotebookmark = remote.listkeys('bookmarks')
373 remotebookmark = remote.listkeys('bookmarks')
371
374
372 explicit = set(pushop.bookmarks)
375 explicit = set(pushop.bookmarks)
373
376
374 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
377 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
375 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
378 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
376 for b, scid, dcid in advsrc:
379 for b, scid, dcid in advsrc:
377 if b in explicit:
380 if b in explicit:
378 explicit.remove(b)
381 explicit.remove(b)
379 if not ancestors or repo[scid].rev() in ancestors:
382 if not ancestors or repo[scid].rev() in ancestors:
380 pushop.outbookmarks.append((b, dcid, scid))
383 pushop.outbookmarks.append((b, dcid, scid))
381 # search added bookmark
384 # search added bookmark
382 for b, scid, dcid in addsrc:
385 for b, scid, dcid in addsrc:
383 if b in explicit:
386 if b in explicit:
384 explicit.remove(b)
387 explicit.remove(b)
385 pushop.outbookmarks.append((b, '', scid))
388 pushop.outbookmarks.append((b, '', scid))
386 # search for overwritten bookmark
389 # search for overwritten bookmark
387 for b, scid, dcid in advdst + diverge + differ:
390 for b, scid, dcid in advdst + diverge + differ:
388 if b in explicit:
391 if b in explicit:
389 explicit.remove(b)
392 explicit.remove(b)
390 pushop.outbookmarks.append((b, dcid, scid))
393 pushop.outbookmarks.append((b, dcid, scid))
391 # search for bookmark to delete
394 # search for bookmark to delete
392 for b, scid, dcid in adddst:
395 for b, scid, dcid in adddst:
393 if b in explicit:
396 if b in explicit:
394 explicit.remove(b)
397 explicit.remove(b)
395 # treat as "deleted locally"
398 # treat as "deleted locally"
396 pushop.outbookmarks.append((b, dcid, ''))
399 pushop.outbookmarks.append((b, dcid, ''))
397 # identical bookmarks shouldn't get reported
400 # identical bookmarks shouldn't get reported
398 for b, scid, dcid in same:
401 for b, scid, dcid in same:
399 if b in explicit:
402 if b in explicit:
400 explicit.remove(b)
403 explicit.remove(b)
401
404
402 if explicit:
405 if explicit:
403 explicit = sorted(explicit)
406 explicit = sorted(explicit)
404 # we should probably list all of them
407 # we should probably list all of them
405 ui.warn(_('bookmark %s does not exist on the local '
408 ui.warn(_('bookmark %s does not exist on the local '
406 'or remote repository!\n') % explicit[0])
409 'or remote repository!\n') % explicit[0])
407 pushop.bkresult = 2
410 pushop.bkresult = 2
408
411
409 pushop.outbookmarks.sort()
412 pushop.outbookmarks.sort()
410
413
411 def _pushcheckoutgoing(pushop):
414 def _pushcheckoutgoing(pushop):
412 outgoing = pushop.outgoing
415 outgoing = pushop.outgoing
413 unfi = pushop.repo.unfiltered()
416 unfi = pushop.repo.unfiltered()
414 if not outgoing.missing:
417 if not outgoing.missing:
415 # nothing to push
418 # nothing to push
416 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
419 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
417 return False
420 return False
418 # something to push
421 # something to push
419 if not pushop.force:
422 if not pushop.force:
420 # if repo.obsstore == False --> no obsolete
423 # if repo.obsstore == False --> no obsolete
421 # then, save the iteration
424 # then, save the iteration
422 if unfi.obsstore:
425 if unfi.obsstore:
423 # this message are here for 80 char limit reason
426 # this message are here for 80 char limit reason
424 mso = _("push includes obsolete changeset: %s!")
427 mso = _("push includes obsolete changeset: %s!")
425 mst = {"unstable": _("push includes unstable changeset: %s!"),
428 mst = {"unstable": _("push includes unstable changeset: %s!"),
426 "bumped": _("push includes bumped changeset: %s!"),
429 "bumped": _("push includes bumped changeset: %s!"),
427 "divergent": _("push includes divergent changeset: %s!")}
430 "divergent": _("push includes divergent changeset: %s!")}
428 # If we are to push if there is at least one
431 # If we are to push if there is at least one
429 # obsolete or unstable changeset in missing, at
432 # obsolete or unstable changeset in missing, at
430 # least one of the missinghead will be obsolete or
433 # least one of the missinghead will be obsolete or
431 # unstable. So checking heads only is ok
434 # unstable. So checking heads only is ok
432 for node in outgoing.missingheads:
435 for node in outgoing.missingheads:
433 ctx = unfi[node]
436 ctx = unfi[node]
434 if ctx.obsolete():
437 if ctx.obsolete():
435 raise util.Abort(mso % ctx)
438 raise util.Abort(mso % ctx)
436 elif ctx.troubled():
439 elif ctx.troubled():
437 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
440 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
438 newbm = pushop.ui.configlist('bookmarks', 'pushing')
441 newbm = pushop.ui.configlist('bookmarks', 'pushing')
439 discovery.checkheads(unfi, pushop.remote, outgoing,
442 discovery.checkheads(unfi, pushop.remote, outgoing,
440 pushop.remoteheads,
443 pushop.remoteheads,
441 pushop.newbranch,
444 pushop.newbranch,
442 bool(pushop.incoming),
445 bool(pushop.incoming),
443 newbm)
446 newbm)
444 return True
447 return True
445
448
446 # List of names of steps to perform for an outgoing bundle2, order matters.
449 # List of names of steps to perform for an outgoing bundle2, order matters.
447 b2partsgenorder = []
450 b2partsgenorder = []
448
451
449 # Mapping between step name and function
452 # Mapping between step name and function
450 #
453 #
451 # This exists to help extensions wrap steps if necessary
454 # This exists to help extensions wrap steps if necessary
452 b2partsgenmapping = {}
455 b2partsgenmapping = {}
453
456
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps.  Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attach to the b2partsgenmapping dictionary directly."""
    def dec(func):
        # each step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            # allow callers to control ordering relative to existing steps
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
472
475
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    if not pushop.force:
        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = None
    cgversions = b2caps.get('changegroup')
    if not cgversions:  # 3.1 and 3.2 ship with an empty value
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing)
    else:
        # negotiate the highest changegroup version both sides understand
        cgversions = [v for v in cgversions if v in changegroup.packermap]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing,
                                                version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if version is not None:
        cgpart.addparam('version', version)
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
513
516
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    # one advisory pushkey part per head to turn public
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey', mandatory=False)
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        """warn about any phase update the server ignored or refused"""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
545
548
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """handle obsolescence marker push through bundle2"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    # only push markers when both sides share an obsmarkers format version
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        buildobsmarkerspart(bundler, markers)
557
560
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    # one advisory pushkey part per bookmark change
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey', mandatory=False)
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))


    def handlereply(op):
        """report per-bookmark success/failure from the server reply"""
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
600
603
601
604
602 def _pushbundle2(pushop):
605 def _pushbundle2(pushop):
603 """push data to the remote using bundle2
606 """push data to the remote using bundle2
604
607
605 The only currently supported type of data is changegroup but this will
608 The only currently supported type of data is changegroup but this will
606 evolve in the future."""
609 evolve in the future."""
607 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
610 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
608 pushback = (pushop.trmanager
611 pushback = (pushop.trmanager
609 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
612 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
610
613
611 # create reply capability
614 # create reply capability
612 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
615 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
613 allowpushback=pushback))
616 allowpushback=pushback))
614 bundler.newpart('replycaps', data=capsblob)
617 bundler.newpart('replycaps', data=capsblob)
615 replyhandlers = []
618 replyhandlers = []
616 for partgenname in b2partsgenorder:
619 for partgenname in b2partsgenorder:
617 partgen = b2partsgenmapping[partgenname]
620 partgen = b2partsgenmapping[partgenname]
618 ret = partgen(pushop, bundler)
621 ret = partgen(pushop, bundler)
619 if callable(ret):
622 if callable(ret):
620 replyhandlers.append(ret)
623 replyhandlers.append(ret)
621 # do not push if nothing to push
624 # do not push if nothing to push
622 if bundler.nbparts <= 1:
625 if bundler.nbparts <= 1:
623 return
626 return
624 stream = util.chunkbuffer(bundler.getchunks())
627 stream = util.chunkbuffer(bundler.getchunks())
625 try:
628 try:
626 reply = pushop.remote.unbundle(stream, ['force'], 'push')
629 try:
627 except error.BundleValueError, exc:
630 reply = pushop.remote.unbundle(stream, ['force'], 'push')
628 raise util.Abort('missing support for %s' % exc)
631 except error.BundleValueError, exc:
629 try:
632 raise util.Abort('missing support for %s' % exc)
630 trgetter = None
633 try:
631 if pushback:
634 trgetter = None
632 trgetter = pushop.trmanager.transaction
635 if pushback:
633 op = bundle2.processbundle(pushop.repo, reply, trgetter)
636 trgetter = pushop.trmanager.transaction
634 except error.BundleValueError, exc:
637 op = bundle2.processbundle(pushop.repo, reply, trgetter)
635 raise util.Abort('missing support for %s' % exc)
638 except error.BundleValueError, exc:
639 raise util.Abort('missing support for %s' % exc)
640 except error.PushkeyFailed, exc:
641 partid = int(exc.partid)
642 if partid not in pushop.pkfailcb:
643 raise
644 pushop.pkfailcb[partid](pushop, exc)
636 for rephand in replyhandlers:
645 for rephand in replyhandlers:
637 rephand(op)
646 rephand(op)
638
647
639 def _pushchangeset(pushop):
648 def _pushchangeset(pushop):
640 """Make the actual push of changeset bundle to remote repo"""
649 """Make the actual push of changeset bundle to remote repo"""
641 if 'changesets' in pushop.stepsdone:
650 if 'changesets' in pushop.stepsdone:
642 return
651 return
643 pushop.stepsdone.add('changesets')
652 pushop.stepsdone.add('changesets')
644 if not _pushcheckoutgoing(pushop):
653 if not _pushcheckoutgoing(pushop):
645 return
654 return
646 pushop.repo.prepushoutgoinghooks(pushop.repo,
655 pushop.repo.prepushoutgoinghooks(pushop.repo,
647 pushop.remote,
656 pushop.remote,
648 pushop.outgoing)
657 pushop.outgoing)
649 outgoing = pushop.outgoing
658 outgoing = pushop.outgoing
650 unbundle = pushop.remote.capable('unbundle')
659 unbundle = pushop.remote.capable('unbundle')
651 # TODO: get bundlecaps from remote
660 # TODO: get bundlecaps from remote
652 bundlecaps = None
661 bundlecaps = None
653 # create a changegroup from local
662 # create a changegroup from local
654 if pushop.revs is None and not (outgoing.excluded
663 if pushop.revs is None and not (outgoing.excluded
655 or pushop.repo.changelog.filteredrevs):
664 or pushop.repo.changelog.filteredrevs):
656 # push everything,
665 # push everything,
657 # use the fast path, no race possible on push
666 # use the fast path, no race possible on push
658 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
667 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
659 cg = changegroup.getsubset(pushop.repo,
668 cg = changegroup.getsubset(pushop.repo,
660 outgoing,
669 outgoing,
661 bundler,
670 bundler,
662 'push',
671 'push',
663 fastpath=True)
672 fastpath=True)
664 else:
673 else:
665 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
674 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
666 bundlecaps)
675 bundlecaps)
667
676
668 # apply changegroup to remote
677 # apply changegroup to remote
669 if unbundle:
678 if unbundle:
670 # local repo finds heads on server, finds out what
679 # local repo finds heads on server, finds out what
671 # revs it must push. once revs transferred, if server
680 # revs it must push. once revs transferred, if server
672 # finds it has different heads (someone else won
681 # finds it has different heads (someone else won
673 # commit/push race), server aborts.
682 # commit/push race), server aborts.
674 if pushop.force:
683 if pushop.force:
675 remoteheads = ['force']
684 remoteheads = ['force']
676 else:
685 else:
677 remoteheads = pushop.remoteheads
686 remoteheads = pushop.remoteheads
678 # ssh: return remote's addchangegroup()
687 # ssh: return remote's addchangegroup()
679 # http: return remote's addchangegroup() or 0 for error
688 # http: return remote's addchangegroup() or 0 for error
680 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
689 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
681 pushop.repo.url())
690 pushop.repo.url())
682 else:
691 else:
683 # we return an integer indicating remote head count
692 # we return an integer indicating remote head count
684 # change
693 # change
685 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
694 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
686 pushop.repo.url())
695 pushop.repo.url())
687
696
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
743
752
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
760
769
761 def _pushobsolete(pushop):
770 def _pushobsolete(pushop):
762 """utility function to push obsolete markers to a remote"""
771 """utility function to push obsolete markers to a remote"""
763 if 'obsmarkers' in pushop.stepsdone:
772 if 'obsmarkers' in pushop.stepsdone:
764 return
773 return
765 pushop.ui.debug('try to push obsolete markers to remote\n')
774 pushop.ui.debug('try to push obsolete markers to remote\n')
766 repo = pushop.repo
775 repo = pushop.repo
767 remote = pushop.remote
776 remote = pushop.remote
768 pushop.stepsdone.add('obsmarkers')
777 pushop.stepsdone.add('obsmarkers')
769 if pushop.outobsmarkers:
778 if pushop.outobsmarkers:
770 rslts = []
779 rslts = []
771 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
780 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
772 for key in sorted(remotedata, reverse=True):
781 for key in sorted(remotedata, reverse=True):
773 # reverse sort to ensure we end with dump0
782 # reverse sort to ensure we end with dump0
774 data = remotedata[key]
783 data = remotedata[key]
775 rslts.append(remote.pushkey('obsolete', key, '', data))
784 rslts.append(remote.pushkey('obsolete', key, '', data))
776 if [r for r in rslts if not r]:
785 if [r for r in rslts if not r]:
777 msg = _('failed to push some obsolete markers!\n')
786 msg = _('failed to push some obsolete markers!\n')
778 repo.ui.warn(msg)
787 repo.ui.warn(msg)
779
788
780 def _pushbookmark(pushop):
789 def _pushbookmark(pushop):
781 """Update bookmark position on remote"""
790 """Update bookmark position on remote"""
782 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
791 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
783 return
792 return
784 pushop.stepsdone.add('bookmarks')
793 pushop.stepsdone.add('bookmarks')
785 ui = pushop.ui
794 ui = pushop.ui
786 remote = pushop.remote
795 remote = pushop.remote
787
796
788 for b, old, new in pushop.outbookmarks:
797 for b, old, new in pushop.outbookmarks:
789 action = 'update'
798 action = 'update'
790 if not old:
799 if not old:
791 action = 'export'
800 action = 'export'
792 elif not new:
801 elif not new:
793 action = 'delete'
802 action = 'delete'
794 if remote.pushkey('bookmarks', b, old, new):
803 if remote.pushkey('bookmarks', b, old, new):
795 ui.status(bookmsgmap[action][0] % b)
804 ui.status(bookmsgmap[action][0] % b)
796 else:
805 else:
797 ui.warn(bookmsgmap[action][1] % b)
806 ui.warn(bookmsgmap[action][1] % b)
798 # discovery can have set the value form invalid entry
807 # discovery can have set the value form invalid entry
799 if pushop.bkresult is not None:
808 if pushop.bkresult is not None:
800 pushop.bkresult = 1
809 pushop.bkresult = 1
801
810
class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
859
868
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily-created transaction; see transaction()
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
889
898
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None):
    """pull changesets (and phases, bookmarks, obsmarkers) from ``remote``

    Returns the ``pulloperation`` carrying the results."""
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           **opargs)
    if pullop.remote.local():
        # refuse to pull from a local repo whose requirements we cannot honor
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        _pulldiscovery(pullop)
        if _canusebundle2(pullop):
            _pullbundle2(pullop)
        # the remaining steps are no-ops when bundle2 already handled them
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        pullop.trmanager.release()
        lock.release()

    return pullop
919
928
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
927
936
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps.  Beware that decorated function will be added in order
    (this may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def dec(func):
        # each discovery step may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec
943
952
944 def _pulldiscovery(pullop):
953 def _pulldiscovery(pullop):
945 """Run all discovery steps"""
954 """Run all discovery steps"""
946 for stepname in pulldiscoveryorder:
955 for stepname in pulldiscoveryorder:
947 step = pulldiscoverymapping[stepname]
956 step = pulldiscoverymapping[stepname]
948 step(pullop)
957 step(pullop)
949
958
950 @pulldiscovery('b1:bookmarks')
959 @pulldiscovery('b1:bookmarks')
951 def _pullbookmarkbundle1(pullop):
960 def _pullbookmarkbundle1(pullop):
952 """fetch bookmark data in bundle1 case
961 """fetch bookmark data in bundle1 case
953
962
954 If not using bundle2, we have to fetch bookmarks before changeset
963 If not using bundle2, we have to fetch bookmarks before changeset
955 discovery to reduce the chance and impact of race conditions."""
964 discovery to reduce the chance and impact of race conditions."""
956 if pullop.remotebookmarks is not None:
965 if pullop.remotebookmarks is not None:
957 return
966 return
958 if (_canusebundle2(pullop)
967 if (_canusebundle2(pullop)
959 and 'listkeys' in bundle2.bundle2caps(pullop.remote)):
968 and 'listkeys' in bundle2.bundle2caps(pullop.remote)):
960 # all known bundle2 servers now support listkeys, but lets be nice with
969 # all known bundle2 servers now support listkeys, but lets be nice with
961 # new implementation.
970 # new implementation.
962 return
971 return
963 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
972 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
964
973
965
974
966 @pulldiscovery('changegroup')
975 @pulldiscovery('changegroup')
967 def _pulldiscoverychangegroup(pullop):
976 def _pulldiscoverychangegroup(pullop):
968 """discovery phase for the pull
977 """discovery phase for the pull
969
978
970 Current handle changeset discovery only, will change handle all discovery
979 Current handle changeset discovery only, will change handle all discovery
971 at some point."""
980 at some point."""
972 tmp = discovery.findcommonincoming(pullop.repo,
981 tmp = discovery.findcommonincoming(pullop.repo,
973 pullop.remote,
982 pullop.remote,
974 heads=pullop.heads,
983 heads=pullop.heads,
975 force=pullop.force)
984 force=pullop.force)
976 common, fetch, rheads = tmp
985 common, fetch, rheads = tmp
977 nm = pullop.repo.unfiltered().changelog.nodemap
986 nm = pullop.repo.unfiltered().changelog.nodemap
978 if fetch and rheads:
987 if fetch and rheads:
979 # If a remote heads in filtered locally, lets drop it from the unknown
988 # If a remote heads in filtered locally, lets drop it from the unknown
980 # remote heads and put in back in common.
989 # remote heads and put in back in common.
981 #
990 #
982 # This is a hackish solution to catch most of "common but locally
991 # This is a hackish solution to catch most of "common but locally
983 # hidden situation". We do not performs discovery on unfiltered
992 # hidden situation". We do not performs discovery on unfiltered
984 # repository because it end up doing a pathological amount of round
993 # repository because it end up doing a pathological amount of round
985 # trip for w huge amount of changeset we do not care about.
994 # trip for w huge amount of changeset we do not care about.
986 #
995 #
987 # If a set of such "common but filtered" changeset exist on the server
996 # If a set of such "common but filtered" changeset exist on the server
988 # but are not including a remote heads, we'll not be able to detect it,
997 # but are not including a remote heads, we'll not be able to detect it,
989 scommon = set(common)
998 scommon = set(common)
990 filteredrheads = []
999 filteredrheads = []
991 for n in rheads:
1000 for n in rheads:
992 if n in nm:
1001 if n in nm:
993 if n not in scommon:
1002 if n not in scommon:
994 common.append(n)
1003 common.append(n)
995 else:
1004 else:
996 filteredrheads.append(n)
1005 filteredrheads.append(n)
997 if not filteredrheads:
1006 if not filteredrheads:
998 fetch = []
1007 fetch = []
999 rheads = filteredrheads
1008 rheads = filteredrheads
1000 pullop.common = common
1009 pullop.common = common
1001 pullop.fetch = fetch
1010 pullop.fetch = fetch
1002 pullop.rheads = rheads
1011 pullop.rheads = rheads
1003
1012
1004 def _pullbundle2(pullop):
1013 def _pullbundle2(pullop):
1005 """pull data using bundle2
1014 """pull data using bundle2
1006
1015
1007 For now, the only supported data are changegroup."""
1016 For now, the only supported data are changegroup."""
1008 remotecaps = bundle2.bundle2caps(pullop.remote)
1017 remotecaps = bundle2.bundle2caps(pullop.remote)
1009 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1018 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1010 # pulling changegroup
1019 # pulling changegroup
1011 pullop.stepsdone.add('changegroup')
1020 pullop.stepsdone.add('changegroup')
1012
1021
1013 kwargs['common'] = pullop.common
1022 kwargs['common'] = pullop.common
1014 kwargs['heads'] = pullop.heads or pullop.rheads
1023 kwargs['heads'] = pullop.heads or pullop.rheads
1015 kwargs['cg'] = pullop.fetch
1024 kwargs['cg'] = pullop.fetch
1016 if 'listkeys' in remotecaps:
1025 if 'listkeys' in remotecaps:
1017 kwargs['listkeys'] = ['phase']
1026 kwargs['listkeys'] = ['phase']
1018 if pullop.remotebookmarks is None:
1027 if pullop.remotebookmarks is None:
1019 # make sure to always includes bookmark data when migrating
1028 # make sure to always includes bookmark data when migrating
1020 # `hg incoming --bundle` to using this function.
1029 # `hg incoming --bundle` to using this function.
1021 kwargs['listkeys'].append('bookmarks')
1030 kwargs['listkeys'].append('bookmarks')
1022 if not pullop.fetch:
1031 if not pullop.fetch:
1023 pullop.repo.ui.status(_("no changes found\n"))
1032 pullop.repo.ui.status(_("no changes found\n"))
1024 pullop.cgresult = 0
1033 pullop.cgresult = 0
1025 else:
1034 else:
1026 if pullop.heads is None and list(pullop.common) == [nullid]:
1035 if pullop.heads is None and list(pullop.common) == [nullid]:
1027 pullop.repo.ui.status(_("requesting all changes\n"))
1036 pullop.repo.ui.status(_("requesting all changes\n"))
1028 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1037 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1029 remoteversions = bundle2.obsmarkersversion(remotecaps)
1038 remoteversions = bundle2.obsmarkersversion(remotecaps)
1030 if obsolete.commonversion(remoteversions) is not None:
1039 if obsolete.commonversion(remoteversions) is not None:
1031 kwargs['obsmarkers'] = True
1040 kwargs['obsmarkers'] = True
1032 pullop.stepsdone.add('obsmarkers')
1041 pullop.stepsdone.add('obsmarkers')
1033 _pullbundle2extraprepare(pullop, kwargs)
1042 _pullbundle2extraprepare(pullop, kwargs)
1034 bundle = pullop.remote.getbundle('pull', **kwargs)
1043 bundle = pullop.remote.getbundle('pull', **kwargs)
1035 try:
1044 try:
1036 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1045 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1037 except error.BundleValueError, exc:
1046 except error.BundleValueError, exc:
1038 raise util.Abort('missing support for %s' % exc)
1047 raise util.Abort('missing support for %s' % exc)
1039
1048
1040 if pullop.fetch:
1049 if pullop.fetch:
1041 results = [cg['return'] for cg in op.records['changegroup']]
1050 results = [cg['return'] for cg in op.records['changegroup']]
1042 pullop.cgresult = changegroup.combineresults(results)
1051 pullop.cgresult = changegroup.combineresults(results)
1043
1052
1044 # processing phases change
1053 # processing phases change
1045 for namespace, value in op.records['listkeys']:
1054 for namespace, value in op.records['listkeys']:
1046 if namespace == 'phases':
1055 if namespace == 'phases':
1047 _pullapplyphases(pullop, value)
1056 _pullapplyphases(pullop, value)
1048
1057
1049 # processing bookmark update
1058 # processing bookmark update
1050 for namespace, value in op.records['listkeys']:
1059 for namespace, value in op.records['listkeys']:
1051 if namespace == 'bookmarks':
1060 if namespace == 'bookmarks':
1052 pullop.remotebookmarks = value
1061 pullop.remotebookmarks = value
1053
1062
1054 # bookmark data were either already there or pulled in the bundle
1063 # bookmark data were either already there or pulled in the bundle
1055 if pullop.remotebookmarks is not None:
1064 if pullop.remotebookmarks is not None:
1056 _pullbookmarks(pullop)
1065 _pullbookmarks(pullop)
1057
1066
1058 def _pullbundle2extraprepare(pullop, kwargs):
1067 def _pullbundle2extraprepare(pullop, kwargs):
1059 """hook function so that extensions can extend the getbundle call"""
1068 """hook function so that extensions can extend the getbundle call"""
1060 pass
1069 pass
1061
1070
1062 def _pullchangeset(pullop):
1071 def _pullchangeset(pullop):
1063 """pull changeset from unbundle into the local repo"""
1072 """pull changeset from unbundle into the local repo"""
1064 # We delay the open of the transaction as late as possible so we
1073 # We delay the open of the transaction as late as possible so we
1065 # don't open transaction for nothing or you break future useful
1074 # don't open transaction for nothing or you break future useful
1066 # rollback call
1075 # rollback call
1067 if 'changegroup' in pullop.stepsdone:
1076 if 'changegroup' in pullop.stepsdone:
1068 return
1077 return
1069 pullop.stepsdone.add('changegroup')
1078 pullop.stepsdone.add('changegroup')
1070 if not pullop.fetch:
1079 if not pullop.fetch:
1071 pullop.repo.ui.status(_("no changes found\n"))
1080 pullop.repo.ui.status(_("no changes found\n"))
1072 pullop.cgresult = 0
1081 pullop.cgresult = 0
1073 return
1082 return
1074 pullop.gettransaction()
1083 pullop.gettransaction()
1075 if pullop.heads is None and list(pullop.common) == [nullid]:
1084 if pullop.heads is None and list(pullop.common) == [nullid]:
1076 pullop.repo.ui.status(_("requesting all changes\n"))
1085 pullop.repo.ui.status(_("requesting all changes\n"))
1077 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1086 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1078 # issue1320, avoid a race if remote changed after discovery
1087 # issue1320, avoid a race if remote changed after discovery
1079 pullop.heads = pullop.rheads
1088 pullop.heads = pullop.rheads
1080
1089
1081 if pullop.remote.capable('getbundle'):
1090 if pullop.remote.capable('getbundle'):
1082 # TODO: get bundlecaps from remote
1091 # TODO: get bundlecaps from remote
1083 cg = pullop.remote.getbundle('pull', common=pullop.common,
1092 cg = pullop.remote.getbundle('pull', common=pullop.common,
1084 heads=pullop.heads or pullop.rheads)
1093 heads=pullop.heads or pullop.rheads)
1085 elif pullop.heads is None:
1094 elif pullop.heads is None:
1086 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1095 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1087 elif not pullop.remote.capable('changegroupsubset'):
1096 elif not pullop.remote.capable('changegroupsubset'):
1088 raise util.Abort(_("partial pull cannot be done because "
1097 raise util.Abort(_("partial pull cannot be done because "
1089 "other repository doesn't support "
1098 "other repository doesn't support "
1090 "changegroupsubset."))
1099 "changegroupsubset."))
1091 else:
1100 else:
1092 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1101 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1093 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
1102 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
1094 pullop.remote.url())
1103 pullop.remote.url())
1095
1104
1096 def _pullphase(pullop):
1105 def _pullphase(pullop):
1097 # Get remote phases data from remote
1106 # Get remote phases data from remote
1098 if 'phases' in pullop.stepsdone:
1107 if 'phases' in pullop.stepsdone:
1099 return
1108 return
1100 remotephases = pullop.remote.listkeys('phases')
1109 remotephases = pullop.remote.listkeys('phases')
1101 _pullapplyphases(pullop, remotephases)
1110 _pullapplyphases(pullop, remotephases)
1102
1111
1103 def _pullapplyphases(pullop, remotephases):
1112 def _pullapplyphases(pullop, remotephases):
1104 """apply phase movement from observed remote state"""
1113 """apply phase movement from observed remote state"""
1105 if 'phases' in pullop.stepsdone:
1114 if 'phases' in pullop.stepsdone:
1106 return
1115 return
1107 pullop.stepsdone.add('phases')
1116 pullop.stepsdone.add('phases')
1108 publishing = bool(remotephases.get('publishing', False))
1117 publishing = bool(remotephases.get('publishing', False))
1109 if remotephases and not publishing:
1118 if remotephases and not publishing:
1110 # remote is new and unpublishing
1119 # remote is new and unpublishing
1111 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1120 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1112 pullop.pulledsubset,
1121 pullop.pulledsubset,
1113 remotephases)
1122 remotephases)
1114 dheads = pullop.pulledsubset
1123 dheads = pullop.pulledsubset
1115 else:
1124 else:
1116 # Remote is old or publishing all common changesets
1125 # Remote is old or publishing all common changesets
1117 # should be seen as public
1126 # should be seen as public
1118 pheads = pullop.pulledsubset
1127 pheads = pullop.pulledsubset
1119 dheads = []
1128 dheads = []
1120 unfi = pullop.repo.unfiltered()
1129 unfi = pullop.repo.unfiltered()
1121 phase = unfi._phasecache.phase
1130 phase = unfi._phasecache.phase
1122 rev = unfi.changelog.nodemap.get
1131 rev = unfi.changelog.nodemap.get
1123 public = phases.public
1132 public = phases.public
1124 draft = phases.draft
1133 draft = phases.draft
1125
1134
1126 # exclude changesets already public locally and update the others
1135 # exclude changesets already public locally and update the others
1127 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1136 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1128 if pheads:
1137 if pheads:
1129 tr = pullop.gettransaction()
1138 tr = pullop.gettransaction()
1130 phases.advanceboundary(pullop.repo, tr, public, pheads)
1139 phases.advanceboundary(pullop.repo, tr, public, pheads)
1131
1140
1132 # exclude changesets already draft locally and update the others
1141 # exclude changesets already draft locally and update the others
1133 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1142 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1134 if dheads:
1143 if dheads:
1135 tr = pullop.gettransaction()
1144 tr = pullop.gettransaction()
1136 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1145 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1137
1146
1138 def _pullbookmarks(pullop):
1147 def _pullbookmarks(pullop):
1139 """process the remote bookmark information to update the local one"""
1148 """process the remote bookmark information to update the local one"""
1140 if 'bookmarks' in pullop.stepsdone:
1149 if 'bookmarks' in pullop.stepsdone:
1141 return
1150 return
1142 pullop.stepsdone.add('bookmarks')
1151 pullop.stepsdone.add('bookmarks')
1143 repo = pullop.repo
1152 repo = pullop.repo
1144 remotebookmarks = pullop.remotebookmarks
1153 remotebookmarks = pullop.remotebookmarks
1145 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1154 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1146 pullop.remote.url(),
1155 pullop.remote.url(),
1147 pullop.gettransaction,
1156 pullop.gettransaction,
1148 explicit=pullop.explicitbookmarks)
1157 explicit=pullop.explicitbookmarks)
1149
1158
1150 def _pullobsolete(pullop):
1159 def _pullobsolete(pullop):
1151 """utility function to pull obsolete markers from a remote
1160 """utility function to pull obsolete markers from a remote
1152
1161
1153 The `gettransaction` is function that return the pull transaction, creating
1162 The `gettransaction` is function that return the pull transaction, creating
1154 one if necessary. We return the transaction to inform the calling code that
1163 one if necessary. We return the transaction to inform the calling code that
1155 a new transaction have been created (when applicable).
1164 a new transaction have been created (when applicable).
1156
1165
1157 Exists mostly to allow overriding for experimentation purpose"""
1166 Exists mostly to allow overriding for experimentation purpose"""
1158 if 'obsmarkers' in pullop.stepsdone:
1167 if 'obsmarkers' in pullop.stepsdone:
1159 return
1168 return
1160 pullop.stepsdone.add('obsmarkers')
1169 pullop.stepsdone.add('obsmarkers')
1161 tr = None
1170 tr = None
1162 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1171 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1163 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1172 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1164 remoteobs = pullop.remote.listkeys('obsolete')
1173 remoteobs = pullop.remote.listkeys('obsolete')
1165 if 'dump0' in remoteobs:
1174 if 'dump0' in remoteobs:
1166 tr = pullop.gettransaction()
1175 tr = pullop.gettransaction()
1167 for key in sorted(remoteobs, reverse=True):
1176 for key in sorted(remoteobs, reverse=True):
1168 if key.startswith('dump'):
1177 if key.startswith('dump'):
1169 data = base85.b85decode(remoteobs[key])
1178 data = base85.b85decode(remoteobs[key])
1170 pullop.repo.obsstore.mergemarkers(tr, data)
1179 pullop.repo.obsstore.mergemarkers(tr, data)
1171 pullop.repo.invalidatevolatilesets()
1180 pullop.repo.invalidatevolatilesets()
1172 return tr
1181 return tr
1173
1182
1174 def caps20to10(repo):
1183 def caps20to10(repo):
1175 """return a set with appropriate options to use bundle20 during getbundle"""
1184 """return a set with appropriate options to use bundle20 during getbundle"""
1176 caps = set(['HG20'])
1185 caps = set(['HG20'])
1177 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1186 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1178 caps.add('bundle2=' + urllib.quote(capsblob))
1187 caps.add('bundle2=' + urllib.quote(capsblob))
1179 return caps
1188 return caps
1180
1189
1181 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1190 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1182 getbundle2partsorder = []
1191 getbundle2partsorder = []
1183
1192
1184 # Mapping between step name and function
1193 # Mapping between step name and function
1185 #
1194 #
1186 # This exists to help extensions wrap steps if necessary
1195 # This exists to help extensions wrap steps if necessary
1187 getbundle2partsmapping = {}
1196 getbundle2partsmapping = {}
1188
1197
1189 def getbundle2partsgenerator(stepname, idx=None):
1198 def getbundle2partsgenerator(stepname, idx=None):
1190 """decorator for function generating bundle2 part for getbundle
1199 """decorator for function generating bundle2 part for getbundle
1191
1200
1192 The function is added to the step -> function mapping and appended to the
1201 The function is added to the step -> function mapping and appended to the
1193 list of steps. Beware that decorated functions will be added in order
1202 list of steps. Beware that decorated functions will be added in order
1194 (this may matter).
1203 (this may matter).
1195
1204
1196 You can only use this decorator for new steps, if you want to wrap a step
1205 You can only use this decorator for new steps, if you want to wrap a step
1197 from an extension, attack the getbundle2partsmapping dictionary directly."""
1206 from an extension, attack the getbundle2partsmapping dictionary directly."""
1198 def dec(func):
1207 def dec(func):
1199 assert stepname not in getbundle2partsmapping
1208 assert stepname not in getbundle2partsmapping
1200 getbundle2partsmapping[stepname] = func
1209 getbundle2partsmapping[stepname] = func
1201 if idx is None:
1210 if idx is None:
1202 getbundle2partsorder.append(stepname)
1211 getbundle2partsorder.append(stepname)
1203 else:
1212 else:
1204 getbundle2partsorder.insert(idx, stepname)
1213 getbundle2partsorder.insert(idx, stepname)
1205 return func
1214 return func
1206 return dec
1215 return dec
1207
1216
1208 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1217 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1209 **kwargs):
1218 **kwargs):
1210 """return a full bundle (with potentially multiple kind of parts)
1219 """return a full bundle (with potentially multiple kind of parts)
1211
1220
1212 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1221 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1213 passed. For now, the bundle can contain only changegroup, but this will
1222 passed. For now, the bundle can contain only changegroup, but this will
1214 changes when more part type will be available for bundle2.
1223 changes when more part type will be available for bundle2.
1215
1224
1216 This is different from changegroup.getchangegroup that only returns an HG10
1225 This is different from changegroup.getchangegroup that only returns an HG10
1217 changegroup bundle. They may eventually get reunited in the future when we
1226 changegroup bundle. They may eventually get reunited in the future when we
1218 have a clearer idea of the API we what to query different data.
1227 have a clearer idea of the API we what to query different data.
1219
1228
1220 The implementation is at a very early stage and will get massive rework
1229 The implementation is at a very early stage and will get massive rework
1221 when the API of bundle is refined.
1230 when the API of bundle is refined.
1222 """
1231 """
1223 # bundle10 case
1232 # bundle10 case
1224 usebundle2 = False
1233 usebundle2 = False
1225 if bundlecaps is not None:
1234 if bundlecaps is not None:
1226 usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps))
1235 usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps))
1227 if not usebundle2:
1236 if not usebundle2:
1228 if bundlecaps and not kwargs.get('cg', True):
1237 if bundlecaps and not kwargs.get('cg', True):
1229 raise ValueError(_('request for bundle10 must include changegroup'))
1238 raise ValueError(_('request for bundle10 must include changegroup'))
1230
1239
1231 if kwargs:
1240 if kwargs:
1232 raise ValueError(_('unsupported getbundle arguments: %s')
1241 raise ValueError(_('unsupported getbundle arguments: %s')
1233 % ', '.join(sorted(kwargs.keys())))
1242 % ', '.join(sorted(kwargs.keys())))
1234 return changegroup.getchangegroup(repo, source, heads=heads,
1243 return changegroup.getchangegroup(repo, source, heads=heads,
1235 common=common, bundlecaps=bundlecaps)
1244 common=common, bundlecaps=bundlecaps)
1236
1245
1237 # bundle20 case
1246 # bundle20 case
1238 b2caps = {}
1247 b2caps = {}
1239 for bcaps in bundlecaps:
1248 for bcaps in bundlecaps:
1240 if bcaps.startswith('bundle2='):
1249 if bcaps.startswith('bundle2='):
1241 blob = urllib.unquote(bcaps[len('bundle2='):])
1250 blob = urllib.unquote(bcaps[len('bundle2='):])
1242 b2caps.update(bundle2.decodecaps(blob))
1251 b2caps.update(bundle2.decodecaps(blob))
1243 bundler = bundle2.bundle20(repo.ui, b2caps)
1252 bundler = bundle2.bundle20(repo.ui, b2caps)
1244
1253
1245 kwargs['heads'] = heads
1254 kwargs['heads'] = heads
1246 kwargs['common'] = common
1255 kwargs['common'] = common
1247
1256
1248 for name in getbundle2partsorder:
1257 for name in getbundle2partsorder:
1249 func = getbundle2partsmapping[name]
1258 func = getbundle2partsmapping[name]
1250 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1259 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1251 **kwargs)
1260 **kwargs)
1252
1261
1253 return util.chunkbuffer(bundler.getchunks())
1262 return util.chunkbuffer(bundler.getchunks())
1254
1263
1255 @getbundle2partsgenerator('changegroup')
1264 @getbundle2partsgenerator('changegroup')
1256 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1265 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1257 b2caps=None, heads=None, common=None, **kwargs):
1266 b2caps=None, heads=None, common=None, **kwargs):
1258 """add a changegroup part to the requested bundle"""
1267 """add a changegroup part to the requested bundle"""
1259 cg = None
1268 cg = None
1260 if kwargs.get('cg', True):
1269 if kwargs.get('cg', True):
1261 # build changegroup bundle here.
1270 # build changegroup bundle here.
1262 version = None
1271 version = None
1263 cgversions = b2caps.get('changegroup')
1272 cgversions = b2caps.get('changegroup')
1264 if not cgversions: # 3.1 and 3.2 ship with an empty value
1273 if not cgversions: # 3.1 and 3.2 ship with an empty value
1265 cg = changegroup.getchangegroupraw(repo, source, heads=heads,
1274 cg = changegroup.getchangegroupraw(repo, source, heads=heads,
1266 common=common,
1275 common=common,
1267 bundlecaps=bundlecaps)
1276 bundlecaps=bundlecaps)
1268 else:
1277 else:
1269 cgversions = [v for v in cgversions if v in changegroup.packermap]
1278 cgversions = [v for v in cgversions if v in changegroup.packermap]
1270 if not cgversions:
1279 if not cgversions:
1271 raise ValueError(_('no common changegroup version'))
1280 raise ValueError(_('no common changegroup version'))
1272 version = max(cgversions)
1281 version = max(cgversions)
1273 cg = changegroup.getchangegroupraw(repo, source, heads=heads,
1282 cg = changegroup.getchangegroupraw(repo, source, heads=heads,
1274 common=common,
1283 common=common,
1275 bundlecaps=bundlecaps,
1284 bundlecaps=bundlecaps,
1276 version=version)
1285 version=version)
1277
1286
1278 if cg:
1287 if cg:
1279 part = bundler.newpart('changegroup', data=cg)
1288 part = bundler.newpart('changegroup', data=cg)
1280 if version is not None:
1289 if version is not None:
1281 part.addparam('version', version)
1290 part.addparam('version', version)
1282
1291
1283 @getbundle2partsgenerator('listkeys')
1292 @getbundle2partsgenerator('listkeys')
1284 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1293 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1285 b2caps=None, **kwargs):
1294 b2caps=None, **kwargs):
1286 """add parts containing listkeys namespaces to the requested bundle"""
1295 """add parts containing listkeys namespaces to the requested bundle"""
1287 listkeys = kwargs.get('listkeys', ())
1296 listkeys = kwargs.get('listkeys', ())
1288 for namespace in listkeys:
1297 for namespace in listkeys:
1289 part = bundler.newpart('listkeys')
1298 part = bundler.newpart('listkeys')
1290 part.addparam('namespace', namespace)
1299 part.addparam('namespace', namespace)
1291 keys = repo.listkeys(namespace).items()
1300 keys = repo.listkeys(namespace).items()
1292 part.data = pushkey.encodekeys(keys)
1301 part.data = pushkey.encodekeys(keys)
1293
1302
1294 @getbundle2partsgenerator('obsmarkers')
1303 @getbundle2partsgenerator('obsmarkers')
1295 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1304 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1296 b2caps=None, heads=None, **kwargs):
1305 b2caps=None, heads=None, **kwargs):
1297 """add an obsolescence markers part to the requested bundle"""
1306 """add an obsolescence markers part to the requested bundle"""
1298 if kwargs.get('obsmarkers', False):
1307 if kwargs.get('obsmarkers', False):
1299 if heads is None:
1308 if heads is None:
1300 heads = repo.heads()
1309 heads = repo.heads()
1301 subset = [c.node() for c in repo.set('::%ln', heads)]
1310 subset = [c.node() for c in repo.set('::%ln', heads)]
1302 markers = repo.obsstore.relevantmarkers(subset)
1311 markers = repo.obsstore.relevantmarkers(subset)
1303 markers = sorted(markers)
1312 markers = sorted(markers)
1304 buildobsmarkerspart(bundler, markers)
1313 buildobsmarkerspart(bundler, markers)
1305
1314
1306 @getbundle2partsgenerator('hgtagsfnodes')
1315 @getbundle2partsgenerator('hgtagsfnodes')
1307 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1316 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1308 b2caps=None, heads=None, common=None,
1317 b2caps=None, heads=None, common=None,
1309 **kwargs):
1318 **kwargs):
1310 """Transfer the .hgtags filenodes mapping.
1319 """Transfer the .hgtags filenodes mapping.
1311
1320
1312 Only values for heads in this bundle will be transferred.
1321 Only values for heads in this bundle will be transferred.
1313
1322
1314 The part data consists of pairs of 20 byte changeset node and .hgtags
1323 The part data consists of pairs of 20 byte changeset node and .hgtags
1315 filenodes raw values.
1324 filenodes raw values.
1316 """
1325 """
1317 # Don't send unless:
1326 # Don't send unless:
1318 # - changeset are being exchanged,
1327 # - changeset are being exchanged,
1319 # - the client supports it.
1328 # - the client supports it.
1320 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1329 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1321 return
1330 return
1322
1331
1323 outgoing = changegroup.computeoutgoing(repo, heads, common)
1332 outgoing = changegroup.computeoutgoing(repo, heads, common)
1324
1333
1325 if not outgoing.missingheads:
1334 if not outgoing.missingheads:
1326 return
1335 return
1327
1336
1328 cache = tags.hgtagsfnodescache(repo.unfiltered())
1337 cache = tags.hgtagsfnodescache(repo.unfiltered())
1329 chunks = []
1338 chunks = []
1330
1339
1331 # .hgtags fnodes are only relevant for head changesets. While we could
1340 # .hgtags fnodes are only relevant for head changesets. While we could
1332 # transfer values for all known nodes, there will likely be little to
1341 # transfer values for all known nodes, there will likely be little to
1333 # no benefit.
1342 # no benefit.
1334 #
1343 #
1335 # We don't bother using a generator to produce output data because
1344 # We don't bother using a generator to produce output data because
1336 # a) we only have 40 bytes per head and even esoteric numbers of heads
1345 # a) we only have 40 bytes per head and even esoteric numbers of heads
1337 # consume little memory (1M heads is 40MB) b) we don't want to send the
1346 # consume little memory (1M heads is 40MB) b) we don't want to send the
1338 # part if we don't have entries and knowing if we have entries requires
1347 # part if we don't have entries and knowing if we have entries requires
1339 # cache lookups.
1348 # cache lookups.
1340 for node in outgoing.missingheads:
1349 for node in outgoing.missingheads:
1341 # Don't compute missing, as this may slow down serving.
1350 # Don't compute missing, as this may slow down serving.
1342 fnode = cache.getfnode(node, computemissing=False)
1351 fnode = cache.getfnode(node, computemissing=False)
1343 if fnode is not None:
1352 if fnode is not None:
1344 chunks.extend([node, fnode])
1353 chunks.extend([node, fnode])
1345
1354
1346 if chunks:
1355 if chunks:
1347 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1356 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1348
1357
1349 def check_heads(repo, their_heads, context):
1358 def check_heads(repo, their_heads, context):
1350 """check if the heads of a repo have been modified
1359 """check if the heads of a repo have been modified
1351
1360
1352 Used by peer for unbundling.
1361 Used by peer for unbundling.
1353 """
1362 """
1354 heads = repo.heads()
1363 heads = repo.heads()
1355 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1364 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1356 if not (their_heads == ['force'] or their_heads == heads or
1365 if not (their_heads == ['force'] or their_heads == heads or
1357 their_heads == ['hashed', heads_hash]):
1366 their_heads == ['hashed', heads_hash]):
1358 # someone else committed/pushed/unbundled while we
1367 # someone else committed/pushed/unbundled while we
1359 # were transferring data
1368 # were transferring data
1360 raise error.PushRaced('repository changed while %s - '
1369 raise error.PushRaced('repository changed while %s - '
1361 'please try again' % context)
1370 'please try again' % context)
1362
1371
1363 def unbundle(repo, cg, heads, source, url):
1372 def unbundle(repo, cg, heads, source, url):
1364 """Apply a bundle to a repo.
1373 """Apply a bundle to a repo.
1365
1374
1366 this function makes sure the repo is locked during the application and have
1375 this function makes sure the repo is locked during the application and have
1367 mechanism to check that no push race occurred between the creation of the
1376 mechanism to check that no push race occurred between the creation of the
1368 bundle and its application.
1377 bundle and its application.
1369
1378
1370 If the push was raced as PushRaced exception is raised."""
1379 If the push was raced as PushRaced exception is raised."""
1371 r = 0
1380 r = 0
1372 # need a transaction when processing a bundle2 stream
1381 # need a transaction when processing a bundle2 stream
1373 wlock = lock = tr = None
1382 wlock = lock = tr = None
1374 recordout = None
1383 recordout = None
1375 # quick fix for output mismatch with bundle2 in 3.4
1384 # quick fix for output mismatch with bundle2 in 3.4
1376 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1385 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1377 False)
1386 False)
1378 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1387 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1379 captureoutput = True
1388 captureoutput = True
1380 try:
1389 try:
1381 check_heads(repo, heads, 'uploading changes')
1390 check_heads(repo, heads, 'uploading changes')
1382 # push can proceed
1391 # push can proceed
1383 if util.safehasattr(cg, 'params'):
1392 if util.safehasattr(cg, 'params'):
1384 r = None
1393 r = None
1385 try:
1394 try:
1386 wlock = repo.wlock()
1395 wlock = repo.wlock()
1387 lock = repo.lock()
1396 lock = repo.lock()
1388 tr = repo.transaction(source)
1397 tr = repo.transaction(source)
1389 tr.hookargs['source'] = source
1398 tr.hookargs['source'] = source
1390 tr.hookargs['url'] = url
1399 tr.hookargs['url'] = url
1391 tr.hookargs['bundle2'] = '1'
1400 tr.hookargs['bundle2'] = '1'
1392 op = bundle2.bundleoperation(repo, lambda: tr,
1401 op = bundle2.bundleoperation(repo, lambda: tr,
1393 captureoutput=captureoutput)
1402 captureoutput=captureoutput)
1394 try:
1403 try:
1395 r = bundle2.processbundle(repo, cg, op=op)
1404 r = bundle2.processbundle(repo, cg, op=op)
1396 finally:
1405 finally:
1397 r = op.reply
1406 r = op.reply
1398 if captureoutput and r is not None:
1407 if captureoutput and r is not None:
1399 repo.ui.pushbuffer(error=True, subproc=True)
1408 repo.ui.pushbuffer(error=True, subproc=True)
1400 def recordout(output):
1409 def recordout(output):
1401 r.newpart('output', data=output, mandatory=False)
1410 r.newpart('output', data=output, mandatory=False)
1402 tr.close()
1411 tr.close()
1403 except BaseException, exc:
1412 except BaseException, exc:
1404 exc.duringunbundle2 = True
1413 exc.duringunbundle2 = True
1405 if captureoutput and r is not None:
1414 if captureoutput and r is not None:
1406 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1415 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1407 def recordout(output):
1416 def recordout(output):
1408 part = bundle2.bundlepart('output', data=output,
1417 part = bundle2.bundlepart('output', data=output,
1409 mandatory=False)
1418 mandatory=False)
1410 parts.append(part)
1419 parts.append(part)
1411 raise
1420 raise
1412 else:
1421 else:
1413 lock = repo.lock()
1422 lock = repo.lock()
1414 r = changegroup.addchangegroup(repo, cg, source, url)
1423 r = changegroup.addchangegroup(repo, cg, source, url)
1415 finally:
1424 finally:
1416 lockmod.release(tr, lock, wlock)
1425 lockmod.release(tr, lock, wlock)
1417 if recordout is not None:
1426 if recordout is not None:
1418 recordout(repo.ui.popbuffer())
1427 recordout(repo.ui.popbuffer())
1419 return r
1428 return r
1420
1429
1421 # This is it's own function so extensions can override it.
1430 # This is it's own function so extensions can override it.
1422 def _walkstreamfiles(repo):
1431 def _walkstreamfiles(repo):
1423 return repo.store.walk()
1432 return repo.store.walk()
1424
1433
1425 def generatestreamclone(repo):
1434 def generatestreamclone(repo):
1426 """Emit content for a streaming clone.
1435 """Emit content for a streaming clone.
1427
1436
1428 This is a generator of raw chunks that constitute a streaming clone.
1437 This is a generator of raw chunks that constitute a streaming clone.
1429
1438
1430 The stream begins with a line of 2 space-delimited integers containing the
1439 The stream begins with a line of 2 space-delimited integers containing the
1431 number of entries and total bytes size.
1440 number of entries and total bytes size.
1432
1441
1433 Next, are N entries for each file being transferred. Each file entry starts
1442 Next, are N entries for each file being transferred. Each file entry starts
1434 as a line with the file name and integer size delimited by a null byte.
1443 as a line with the file name and integer size delimited by a null byte.
1435 The raw file data follows. Following the raw file data is the next file
1444 The raw file data follows. Following the raw file data is the next file
1436 entry, or EOF.
1445 entry, or EOF.
1437
1446
1438 When used on the wire protocol, an additional line indicating protocol
1447 When used on the wire protocol, an additional line indicating protocol
1439 success will be prepended to the stream. This function is not responsible
1448 success will be prepended to the stream. This function is not responsible
1440 for adding it.
1449 for adding it.
1441
1450
1442 This function will obtain a repository lock to ensure a consistent view of
1451 This function will obtain a repository lock to ensure a consistent view of
1443 the store is captured. It therefore may raise LockError.
1452 the store is captured. It therefore may raise LockError.
1444 """
1453 """
1445 entries = []
1454 entries = []
1446 total_bytes = 0
1455 total_bytes = 0
1447 # Get consistent snapshot of repo, lock during scan.
1456 # Get consistent snapshot of repo, lock during scan.
1448 lock = repo.lock()
1457 lock = repo.lock()
1449 try:
1458 try:
1450 repo.ui.debug('scanning\n')
1459 repo.ui.debug('scanning\n')
1451 for name, ename, size in _walkstreamfiles(repo):
1460 for name, ename, size in _walkstreamfiles(repo):
1452 if size:
1461 if size:
1453 entries.append((name, size))
1462 entries.append((name, size))
1454 total_bytes += size
1463 total_bytes += size
1455 finally:
1464 finally:
1456 lock.release()
1465 lock.release()
1457
1466
1458 repo.ui.debug('%d files, %d bytes to transfer\n' %
1467 repo.ui.debug('%d files, %d bytes to transfer\n' %
1459 (len(entries), total_bytes))
1468 (len(entries), total_bytes))
1460 yield '%d %d\n' % (len(entries), total_bytes)
1469 yield '%d %d\n' % (len(entries), total_bytes)
1461
1470
1462 sopener = repo.svfs
1471 sopener = repo.svfs
1463 oldaudit = sopener.mustaudit
1472 oldaudit = sopener.mustaudit
1464 debugflag = repo.ui.debugflag
1473 debugflag = repo.ui.debugflag
1465 sopener.mustaudit = False
1474 sopener.mustaudit = False
1466
1475
1467 try:
1476 try:
1468 for name, size in entries:
1477 for name, size in entries:
1469 if debugflag:
1478 if debugflag:
1470 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
1479 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
1471 # partially encode name over the wire for backwards compat
1480 # partially encode name over the wire for backwards compat
1472 yield '%s\0%d\n' % (store.encodedir(name), size)
1481 yield '%s\0%d\n' % (store.encodedir(name), size)
1473 if size <= 65536:
1482 if size <= 65536:
1474 fp = sopener(name)
1483 fp = sopener(name)
1475 try:
1484 try:
1476 data = fp.read(size)
1485 data = fp.read(size)
1477 finally:
1486 finally:
1478 fp.close()
1487 fp.close()
1479 yield data
1488 yield data
1480 else:
1489 else:
1481 for chunk in util.filechunkiter(sopener(name), limit=size):
1490 for chunk in util.filechunkiter(sopener(name), limit=size):
1482 yield chunk
1491 yield chunk
1483 finally:
1492 finally:
1484 sopener.mustaudit = oldaudit
1493 sopener.mustaudit = oldaudit
1485
1494
1486 def consumestreamclone(repo, fp):
1495 def consumestreamclone(repo, fp):
1487 """Apply the contents from a streaming clone file.
1496 """Apply the contents from a streaming clone file.
1488
1497
1489 This takes the output from "streamout" and applies it to the specified
1498 This takes the output from "streamout" and applies it to the specified
1490 repository.
1499 repository.
1491
1500
1492 Like "streamout," the status line added by the wire protocol is not handled
1501 Like "streamout," the status line added by the wire protocol is not handled
1493 by this function.
1502 by this function.
1494 """
1503 """
1495 lock = repo.lock()
1504 lock = repo.lock()
1496 try:
1505 try:
1497 repo.ui.status(_('streaming all changes\n'))
1506 repo.ui.status(_('streaming all changes\n'))
1498 l = fp.readline()
1507 l = fp.readline()
1499 try:
1508 try:
1500 total_files, total_bytes = map(int, l.split(' ', 1))
1509 total_files, total_bytes = map(int, l.split(' ', 1))
1501 except (ValueError, TypeError):
1510 except (ValueError, TypeError):
1502 raise error.ResponseError(
1511 raise error.ResponseError(
1503 _('unexpected response from remote server:'), l)
1512 _('unexpected response from remote server:'), l)
1504 repo.ui.status(_('%d files to transfer, %s of data\n') %
1513 repo.ui.status(_('%d files to transfer, %s of data\n') %
1505 (total_files, util.bytecount(total_bytes)))
1514 (total_files, util.bytecount(total_bytes)))
1506 handled_bytes = 0
1515 handled_bytes = 0
1507 repo.ui.progress(_('clone'), 0, total=total_bytes)
1516 repo.ui.progress(_('clone'), 0, total=total_bytes)
1508 start = time.time()
1517 start = time.time()
1509
1518
1510 tr = repo.transaction(_('clone'))
1519 tr = repo.transaction(_('clone'))
1511 try:
1520 try:
1512 for i in xrange(total_files):
1521 for i in xrange(total_files):
1513 # XXX doesn't support '\n' or '\r' in filenames
1522 # XXX doesn't support '\n' or '\r' in filenames
1514 l = fp.readline()
1523 l = fp.readline()
1515 try:
1524 try:
1516 name, size = l.split('\0', 1)
1525 name, size = l.split('\0', 1)
1517 size = int(size)
1526 size = int(size)
1518 except (ValueError, TypeError):
1527 except (ValueError, TypeError):
1519 raise error.ResponseError(
1528 raise error.ResponseError(
1520 _('unexpected response from remote server:'), l)
1529 _('unexpected response from remote server:'), l)
1521 if repo.ui.debugflag:
1530 if repo.ui.debugflag:
1522 repo.ui.debug('adding %s (%s)\n' %
1531 repo.ui.debug('adding %s (%s)\n' %
1523 (name, util.bytecount(size)))
1532 (name, util.bytecount(size)))
1524 # for backwards compat, name was partially encoded
1533 # for backwards compat, name was partially encoded
1525 ofp = repo.svfs(store.decodedir(name), 'w')
1534 ofp = repo.svfs(store.decodedir(name), 'w')
1526 for chunk in util.filechunkiter(fp, limit=size):
1535 for chunk in util.filechunkiter(fp, limit=size):
1527 handled_bytes += len(chunk)
1536 handled_bytes += len(chunk)
1528 repo.ui.progress(_('clone'), handled_bytes,
1537 repo.ui.progress(_('clone'), handled_bytes,
1529 total=total_bytes)
1538 total=total_bytes)
1530 ofp.write(chunk)
1539 ofp.write(chunk)
1531 ofp.close()
1540 ofp.close()
1532 tr.close()
1541 tr.close()
1533 finally:
1542 finally:
1534 tr.release()
1543 tr.release()
1535
1544
1536 # Writing straight to files circumvented the inmemory caches
1545 # Writing straight to files circumvented the inmemory caches
1537 repo.invalidate()
1546 repo.invalidate()
1538
1547
1539 elapsed = time.time() - start
1548 elapsed = time.time() - start
1540 if elapsed <= 0:
1549 if elapsed <= 0:
1541 elapsed = 0.001
1550 elapsed = 0.001
1542 repo.ui.progress(_('clone'), None)
1551 repo.ui.progress(_('clone'), None)
1543 repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1552 repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1544 (util.bytecount(total_bytes), elapsed,
1553 (util.bytecount(total_bytes), elapsed,
1545 util.bytecount(total_bytes / elapsed)))
1554 util.bytecount(total_bytes / elapsed)))
1546 finally:
1555 finally:
1547 lock.release()
1556 lock.release()
@@ -1,861 +1,869
1 Test exchange of common information using bundle2
1 Test exchange of common information using bundle2
2
2
3
3
4 $ getmainid() {
4 $ getmainid() {
5 > hg -R main log --template '{node}\n' --rev "$1"
5 > hg -R main log --template '{node}\n' --rev "$1"
6 > }
6 > }
7
7
8 enable obsolescence
8 enable obsolescence
9
9
10 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
10 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
11 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
11 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
12 > hg debuglock
12 > hg debuglock
13 > EOF
13 > EOF
14
14
15 $ cat >> $HGRCPATH << EOF
15 $ cat >> $HGRCPATH << EOF
16 > [experimental]
16 > [experimental]
17 > evolution=createmarkers,exchange
17 > evolution=createmarkers,exchange
18 > bundle2-exp=True
18 > bundle2-exp=True
19 > bundle2-output-capture=True
19 > bundle2-output-capture=True
20 > [ui]
20 > [ui]
21 > ssh=dummyssh
21 > ssh=dummyssh
22 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
22 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
23 > [web]
23 > [web]
24 > push_ssl = false
24 > push_ssl = false
25 > allow_push = *
25 > allow_push = *
26 > [phases]
26 > [phases]
27 > publish=False
27 > publish=False
28 > [hooks]
28 > [hooks]
29 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
29 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
30 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
30 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
31 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
31 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
32 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
32 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
33 > EOF
33 > EOF
34
34
35 The extension requires a repo (currently unused)
35 The extension requires a repo (currently unused)
36
36
37 $ hg init main
37 $ hg init main
38 $ cd main
38 $ cd main
39 $ touch a
39 $ touch a
40 $ hg add a
40 $ hg add a
41 $ hg commit -m 'a'
41 $ hg commit -m 'a'
42 pre-close-tip:3903775176ed draft
42 pre-close-tip:3903775176ed draft
43 postclose-tip:3903775176ed draft
43 postclose-tip:3903775176ed draft
44 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
44 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
45
45
46 $ hg unbundle $TESTDIR/bundles/rebase.hg
46 $ hg unbundle $TESTDIR/bundles/rebase.hg
47 adding changesets
47 adding changesets
48 adding manifests
48 adding manifests
49 adding file changes
49 adding file changes
50 added 8 changesets with 7 changes to 7 files (+3 heads)
50 added 8 changesets with 7 changes to 7 files (+3 heads)
51 pre-close-tip:02de42196ebe draft
51 pre-close-tip:02de42196ebe draft
52 postclose-tip:02de42196ebe draft
52 postclose-tip:02de42196ebe draft
53 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
53 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
54 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
54 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
55 (run 'hg heads' to see heads, 'hg merge' to merge)
55 (run 'hg heads' to see heads, 'hg merge' to merge)
56
56
57 $ cd ..
57 $ cd ..
58
58
59 Real world exchange
59 Real world exchange
60 =====================
60 =====================
61
61
62 Add more obsolescence information
62 Add more obsolescence information
63
63
64 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
64 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
65 pre-close-tip:02de42196ebe draft
65 pre-close-tip:02de42196ebe draft
66 postclose-tip:02de42196ebe draft
66 postclose-tip:02de42196ebe draft
67 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
67 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
68 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
68 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
69 pre-close-tip:02de42196ebe draft
69 pre-close-tip:02de42196ebe draft
70 postclose-tip:02de42196ebe draft
70 postclose-tip:02de42196ebe draft
71 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
71 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
72
72
73 clone --pull
73 clone --pull
74
74
75 $ hg -R main phase --public cd010b8cd998
75 $ hg -R main phase --public cd010b8cd998
76 pre-close-tip:02de42196ebe draft
76 pre-close-tip:02de42196ebe draft
77 postclose-tip:02de42196ebe draft
77 postclose-tip:02de42196ebe draft
78 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
78 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
79 $ hg clone main other --pull --rev 9520eea781bc
79 $ hg clone main other --pull --rev 9520eea781bc
80 adding changesets
80 adding changesets
81 adding manifests
81 adding manifests
82 adding file changes
82 adding file changes
83 added 2 changesets with 2 changes to 2 files
83 added 2 changesets with 2 changes to 2 files
84 1 new obsolescence markers
84 1 new obsolescence markers
85 pre-close-tip:9520eea781bc draft
85 pre-close-tip:9520eea781bc draft
86 postclose-tip:9520eea781bc draft
86 postclose-tip:9520eea781bc draft
87 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
87 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
88 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
88 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
89 updating to branch default
89 updating to branch default
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 $ hg -R other log -G
91 $ hg -R other log -G
92 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
92 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
93 |
93 |
94 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
94 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
95
95
96 $ hg -R other debugobsolete
96 $ hg -R other debugobsolete
97 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
97 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
98
98
99 pull
99 pull
100
100
101 $ hg -R main phase --public 9520eea781bc
101 $ hg -R main phase --public 9520eea781bc
102 pre-close-tip:02de42196ebe draft
102 pre-close-tip:02de42196ebe draft
103 postclose-tip:02de42196ebe draft
103 postclose-tip:02de42196ebe draft
104 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
104 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
105 $ hg -R other pull -r 24b6387c8c8c
105 $ hg -R other pull -r 24b6387c8c8c
106 pulling from $TESTTMP/main (glob)
106 pulling from $TESTTMP/main (glob)
107 searching for changes
107 searching for changes
108 adding changesets
108 adding changesets
109 adding manifests
109 adding manifests
110 adding file changes
110 adding file changes
111 added 1 changesets with 1 changes to 1 files (+1 heads)
111 added 1 changesets with 1 changes to 1 files (+1 heads)
112 1 new obsolescence markers
112 1 new obsolescence markers
113 pre-close-tip:24b6387c8c8c draft
113 pre-close-tip:24b6387c8c8c draft
114 postclose-tip:24b6387c8c8c draft
114 postclose-tip:24b6387c8c8c draft
115 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
115 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
116 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
116 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
117 (run 'hg heads' to see heads, 'hg merge' to merge)
117 (run 'hg heads' to see heads, 'hg merge' to merge)
118 $ hg -R other log -G
118 $ hg -R other log -G
119 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
119 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
120 |
120 |
121 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
121 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
122 |/
122 |/
123 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
123 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
124
124
125 $ hg -R other debugobsolete
125 $ hg -R other debugobsolete
126 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
126 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128
128
129 pull empty (with phase movement)
129 pull empty (with phase movement)
130
130
131 $ hg -R main phase --public 24b6387c8c8c
131 $ hg -R main phase --public 24b6387c8c8c
132 pre-close-tip:02de42196ebe draft
132 pre-close-tip:02de42196ebe draft
133 postclose-tip:02de42196ebe draft
133 postclose-tip:02de42196ebe draft
134 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
134 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
135 $ hg -R other pull -r 24b6387c8c8c
135 $ hg -R other pull -r 24b6387c8c8c
136 pulling from $TESTTMP/main (glob)
136 pulling from $TESTTMP/main (glob)
137 no changes found
137 no changes found
138 pre-close-tip:24b6387c8c8c public
138 pre-close-tip:24b6387c8c8c public
139 postclose-tip:24b6387c8c8c public
139 postclose-tip:24b6387c8c8c public
140 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
140 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
141 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
141 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
142 $ hg -R other log -G
142 $ hg -R other log -G
143 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
143 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
144 |
144 |
145 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
145 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
146 |/
146 |/
147 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
147 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
148
148
149 $ hg -R other debugobsolete
149 $ hg -R other debugobsolete
150 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
150 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
152
152
153 pull empty
153 pull empty
154
154
155 $ hg -R other pull -r 24b6387c8c8c
155 $ hg -R other pull -r 24b6387c8c8c
156 pulling from $TESTTMP/main (glob)
156 pulling from $TESTTMP/main (glob)
157 no changes found
157 no changes found
158 pre-close-tip:24b6387c8c8c public
158 pre-close-tip:24b6387c8c8c public
159 postclose-tip:24b6387c8c8c public
159 postclose-tip:24b6387c8c8c public
160 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
160 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
161 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
161 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
162 $ hg -R other log -G
162 $ hg -R other log -G
163 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
163 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
164 |
164 |
165 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
165 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
166 |/
166 |/
167 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
167 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
168
168
169 $ hg -R other debugobsolete
169 $ hg -R other debugobsolete
170 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
170 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
172
172
173 add extra data to test their exchange during push
173 add extra data to test their exchange during push
174
174
175 $ hg -R main bookmark --rev eea13746799a book_eea1
175 $ hg -R main bookmark --rev eea13746799a book_eea1
176 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
176 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
177 pre-close-tip:02de42196ebe draft
177 pre-close-tip:02de42196ebe draft
178 postclose-tip:02de42196ebe draft
178 postclose-tip:02de42196ebe draft
179 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
179 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
180 $ hg -R main bookmark --rev 02de42196ebe book_02de
180 $ hg -R main bookmark --rev 02de42196ebe book_02de
181 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
181 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
182 pre-close-tip:02de42196ebe draft book_02de
182 pre-close-tip:02de42196ebe draft book_02de
183 postclose-tip:02de42196ebe draft book_02de
183 postclose-tip:02de42196ebe draft book_02de
184 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
184 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
185 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
185 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
186 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
186 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
187 pre-close-tip:02de42196ebe draft book_02de
187 pre-close-tip:02de42196ebe draft book_02de
188 postclose-tip:02de42196ebe draft book_02de
188 postclose-tip:02de42196ebe draft book_02de
189 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
189 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
190 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
190 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
191 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
191 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
192 pre-close-tip:02de42196ebe draft book_02de
192 pre-close-tip:02de42196ebe draft book_02de
193 postclose-tip:02de42196ebe draft book_02de
193 postclose-tip:02de42196ebe draft book_02de
194 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
194 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
195 $ hg -R main bookmark --rev 32af7686d403 book_32af
195 $ hg -R main bookmark --rev 32af7686d403 book_32af
196 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
196 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
197 pre-close-tip:02de42196ebe draft book_02de
197 pre-close-tip:02de42196ebe draft book_02de
198 postclose-tip:02de42196ebe draft book_02de
198 postclose-tip:02de42196ebe draft book_02de
199 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
199 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
200
200
201 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
201 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
202 $ hg -R other bookmark --rev cd010b8cd998 book_02de
202 $ hg -R other bookmark --rev cd010b8cd998 book_02de
203 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
203 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
204 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
204 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
205 $ hg -R other bookmark --rev cd010b8cd998 book_32af
205 $ hg -R other bookmark --rev cd010b8cd998 book_32af
206
206
207 $ hg -R main phase --public eea13746799a
207 $ hg -R main phase --public eea13746799a
208 pre-close-tip:02de42196ebe draft book_02de
208 pre-close-tip:02de42196ebe draft book_02de
209 postclose-tip:02de42196ebe draft book_02de
209 postclose-tip:02de42196ebe draft book_02de
210 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
210 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
211
211
212 push
212 push
213 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
213 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
214 pushing to other
214 pushing to other
215 searching for changes
215 searching for changes
216 remote: adding changesets
216 remote: adding changesets
217 remote: adding manifests
217 remote: adding manifests
218 remote: adding file changes
218 remote: adding file changes
219 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
219 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
220 remote: 1 new obsolescence markers
220 remote: 1 new obsolescence markers
221 remote: pre-close-tip:eea13746799a public book_eea1
221 remote: pre-close-tip:eea13746799a public book_eea1
222 remote: pushkey: lock state after "phases"
222 remote: pushkey: lock state after "phases"
223 remote: lock: free
223 remote: lock: free
224 remote: wlock: free
224 remote: wlock: free
225 remote: pushkey: lock state after "bookmarks"
225 remote: pushkey: lock state after "bookmarks"
226 remote: lock: free
226 remote: lock: free
227 remote: wlock: free
227 remote: wlock: free
228 remote: postclose-tip:eea13746799a public book_eea1
228 remote: postclose-tip:eea13746799a public book_eea1
229 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
229 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
230 updating bookmark book_eea1
230 updating bookmark book_eea1
231 pre-close-tip:02de42196ebe draft book_02de
231 pre-close-tip:02de42196ebe draft book_02de
232 postclose-tip:02de42196ebe draft book_02de
232 postclose-tip:02de42196ebe draft book_02de
233 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
233 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
234 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
234 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
235 $ hg -R other log -G
235 $ hg -R other log -G
236 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
236 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
237 |\
237 |\
238 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
238 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
239 | |
239 | |
240 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
240 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
241 |/
241 |/
242 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
242 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
243
243
244 $ hg -R other debugobsolete
244 $ hg -R other debugobsolete
245 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
245 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
246 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
246 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
247 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
247 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
248
248
249 pull over ssh
249 pull over ssh
250
250
251 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
251 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
252 pulling from ssh://user@dummy/main
252 pulling from ssh://user@dummy/main
253 searching for changes
253 searching for changes
254 adding changesets
254 adding changesets
255 adding manifests
255 adding manifests
256 adding file changes
256 adding file changes
257 added 1 changesets with 1 changes to 1 files (+1 heads)
257 added 1 changesets with 1 changes to 1 files (+1 heads)
258 1 new obsolescence markers
258 1 new obsolescence markers
259 updating bookmark book_02de
259 updating bookmark book_02de
260 pre-close-tip:02de42196ebe draft book_02de
260 pre-close-tip:02de42196ebe draft book_02de
261 postclose-tip:02de42196ebe draft book_02de
261 postclose-tip:02de42196ebe draft book_02de
262 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
262 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
263 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
263 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
264 (run 'hg heads' to see heads, 'hg merge' to merge)
264 (run 'hg heads' to see heads, 'hg merge' to merge)
265 $ hg -R other debugobsolete
265 $ hg -R other debugobsolete
266 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
266 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
267 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
267 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
268 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
268 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
269 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
269 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
270
270
271 pull over http
271 pull over http
272
272
273 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
273 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
274 $ cat main.pid >> $DAEMON_PIDS
274 $ cat main.pid >> $DAEMON_PIDS
275
275
276 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
276 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
277 pulling from http://localhost:$HGPORT/
277 pulling from http://localhost:$HGPORT/
278 searching for changes
278 searching for changes
279 adding changesets
279 adding changesets
280 adding manifests
280 adding manifests
281 adding file changes
281 adding file changes
282 added 1 changesets with 1 changes to 1 files (+1 heads)
282 added 1 changesets with 1 changes to 1 files (+1 heads)
283 1 new obsolescence markers
283 1 new obsolescence markers
284 updating bookmark book_42cc
284 updating bookmark book_42cc
285 pre-close-tip:42ccdea3bb16 draft book_42cc
285 pre-close-tip:42ccdea3bb16 draft book_42cc
286 postclose-tip:42ccdea3bb16 draft book_42cc
286 postclose-tip:42ccdea3bb16 draft book_42cc
287 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
287 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
288 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
288 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
289 (run 'hg heads .' to see heads, 'hg merge' to merge)
289 (run 'hg heads .' to see heads, 'hg merge' to merge)
290 $ cat main-error.log
290 $ cat main-error.log
291 $ hg -R other debugobsolete
291 $ hg -R other debugobsolete
292 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
292 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
293 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
293 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
294 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
294 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
295 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
295 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297
297
298 push over ssh
298 push over ssh
299
299
300 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
300 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
301 pushing to ssh://user@dummy/other
301 pushing to ssh://user@dummy/other
302 searching for changes
302 searching for changes
303 remote: adding changesets
303 remote: adding changesets
304 remote: adding manifests
304 remote: adding manifests
305 remote: adding file changes
305 remote: adding file changes
306 remote: added 1 changesets with 1 changes to 1 files
306 remote: added 1 changesets with 1 changes to 1 files
307 remote: 1 new obsolescence markers
307 remote: 1 new obsolescence markers
308 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
308 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
309 remote: pushkey: lock state after "bookmarks"
309 remote: pushkey: lock state after "bookmarks"
310 remote: lock: free
310 remote: lock: free
311 remote: wlock: free
311 remote: wlock: free
312 remote: postclose-tip:5fddd98957c8 draft book_5fdd
312 remote: postclose-tip:5fddd98957c8 draft book_5fdd
313 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
313 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
314 updating bookmark book_5fdd
314 updating bookmark book_5fdd
315 pre-close-tip:02de42196ebe draft book_02de
315 pre-close-tip:02de42196ebe draft book_02de
316 postclose-tip:02de42196ebe draft book_02de
316 postclose-tip:02de42196ebe draft book_02de
317 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
317 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
318 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
318 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
319 $ hg -R other log -G
319 $ hg -R other log -G
320 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
320 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
321 |
321 |
322 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
322 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
323 |
323 |
324 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
324 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
325 | |
325 | |
326 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
326 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
327 | |/|
327 | |/|
328 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
328 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
329 |/ /
329 |/ /
330 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
330 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
331 |/
331 |/
332 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
332 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
333
333
334 $ hg -R other debugobsolete
334 $ hg -R other debugobsolete
335 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
335 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
336 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
336 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
337 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
337 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
338 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
338 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
339 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
339 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
340 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
340 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
341
341
342 push over http
342 push over http
343
343
344 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
344 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
345 $ cat other.pid >> $DAEMON_PIDS
345 $ cat other.pid >> $DAEMON_PIDS
346
346
347 $ hg -R main phase --public 32af7686d403
347 $ hg -R main phase --public 32af7686d403
348 pre-close-tip:02de42196ebe draft book_02de
348 pre-close-tip:02de42196ebe draft book_02de
349 postclose-tip:02de42196ebe draft book_02de
349 postclose-tip:02de42196ebe draft book_02de
350 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
350 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
351 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
351 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
352 pushing to http://localhost:$HGPORT2/
352 pushing to http://localhost:$HGPORT2/
353 searching for changes
353 searching for changes
354 remote: adding changesets
354 remote: adding changesets
355 remote: adding manifests
355 remote: adding manifests
356 remote: adding file changes
356 remote: adding file changes
357 remote: added 1 changesets with 1 changes to 1 files
357 remote: added 1 changesets with 1 changes to 1 files
358 remote: 1 new obsolescence markers
358 remote: 1 new obsolescence markers
359 remote: pre-close-tip:32af7686d403 public book_32af
359 remote: pre-close-tip:32af7686d403 public book_32af
360 remote: pushkey: lock state after "phases"
360 remote: pushkey: lock state after "phases"
361 remote: lock: free
361 remote: lock: free
362 remote: wlock: free
362 remote: wlock: free
363 remote: pushkey: lock state after "bookmarks"
363 remote: pushkey: lock state after "bookmarks"
364 remote: lock: free
364 remote: lock: free
365 remote: wlock: free
365 remote: wlock: free
366 remote: postclose-tip:32af7686d403 public book_32af
366 remote: postclose-tip:32af7686d403 public book_32af
367 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
367 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
368 updating bookmark book_32af
368 updating bookmark book_32af
369 pre-close-tip:02de42196ebe draft book_02de
369 pre-close-tip:02de42196ebe draft book_02de
370 postclose-tip:02de42196ebe draft book_02de
370 postclose-tip:02de42196ebe draft book_02de
371 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
371 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
372 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
372 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
373 $ cat other-error.log
373 $ cat other-error.log
374
374
375 Check final content.
375 Check final content.
376
376
377 $ hg -R other log -G
377 $ hg -R other log -G
378 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
378 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
379 |
379 |
380 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
380 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
381 |
381 |
382 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
382 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
383 |
383 |
384 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
384 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
385 | |
385 | |
386 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
386 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
387 | |/|
387 | |/|
388 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
388 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
389 |/ /
389 |/ /
390 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
390 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
391 |/
391 |/
392 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
392 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
393
393
394 $ hg -R other debugobsolete
394 $ hg -R other debugobsolete
395 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
395 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
396 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
396 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
397 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
397 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
398 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
398 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
399 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
399 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
400 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
400 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
401 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
401 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
402
402
403 (check that no 'pending' files remain)
403 (check that no 'pending' files remain)
404
404
405 $ ls -1 other/.hg/bookmarks*
405 $ ls -1 other/.hg/bookmarks*
406 other/.hg/bookmarks
406 other/.hg/bookmarks
407 $ ls -1 other/.hg/store/phaseroots*
407 $ ls -1 other/.hg/store/phaseroots*
408 other/.hg/store/phaseroots
408 other/.hg/store/phaseroots
409 $ ls -1 other/.hg/store/00changelog.i*
409 $ ls -1 other/.hg/store/00changelog.i*
410 other/.hg/store/00changelog.i
410 other/.hg/store/00changelog.i
411
411
412 Error Handling
412 Error Handling
413 ==============
413 ==============
414
414
415 Check that errors are properly returned to the client during push.
415 Check that errors are properly returned to the client during push.
416
416
417 Setting up
417 Setting up
418
418
419 $ cat > failpush.py << EOF
419 $ cat > failpush.py << EOF
420 > """A small extension that makes push fails when using bundle2
420 > """A small extension that makes push fails when using bundle2
421 >
421 >
422 > used to test error handling in bundle2
422 > used to test error handling in bundle2
423 > """
423 > """
424 >
424 >
425 > from mercurial import util
425 > from mercurial import util
426 > from mercurial import bundle2
426 > from mercurial import bundle2
427 > from mercurial import exchange
427 > from mercurial import exchange
428 > from mercurial import extensions
428 > from mercurial import extensions
429 >
429 >
430 > def _pushbundle2failpart(pushop, bundler):
430 > def _pushbundle2failpart(pushop, bundler):
431 > reason = pushop.ui.config('failpush', 'reason', None)
431 > reason = pushop.ui.config('failpush', 'reason', None)
432 > part = None
432 > part = None
433 > if reason == 'abort':
433 > if reason == 'abort':
434 > bundler.newpart('test:abort')
434 > bundler.newpart('test:abort')
435 > if reason == 'unknown':
435 > if reason == 'unknown':
436 > bundler.newpart('test:unknown')
436 > bundler.newpart('test:unknown')
437 > if reason == 'race':
437 > if reason == 'race':
438 > # 20 Bytes of crap
438 > # 20 Bytes of crap
439 > bundler.newpart('check:heads', data='01234567890123456789')
439 > bundler.newpart('check:heads', data='01234567890123456789')
440 >
440 >
441 > @bundle2.parthandler("test:abort")
441 > @bundle2.parthandler("test:abort")
442 > def handleabort(op, part):
442 > def handleabort(op, part):
443 > raise util.Abort('Abandon ship!', hint="don't panic")
443 > raise util.Abort('Abandon ship!', hint="don't panic")
444 >
444 >
445 > def uisetup(ui):
445 > def uisetup(ui):
446 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
446 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
447 > exchange.b2partsgenorder.insert(0, 'failpart')
447 > exchange.b2partsgenorder.insert(0, 'failpart')
448 >
448 >
449 > EOF
449 > EOF
450
450
451 $ cd main
451 $ cd main
452 $ hg up tip
452 $ hg up tip
453 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
453 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
454 $ echo 'I' > I
454 $ echo 'I' > I
455 $ hg add I
455 $ hg add I
456 $ hg ci -m 'I'
456 $ hg ci -m 'I'
457 pre-close-tip:e7ec4e813ba6 draft
457 pre-close-tip:e7ec4e813ba6 draft
458 postclose-tip:e7ec4e813ba6 draft
458 postclose-tip:e7ec4e813ba6 draft
459 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
459 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
460 $ hg id
460 $ hg id
461 e7ec4e813ba6 tip
461 e7ec4e813ba6 tip
462 $ cd ..
462 $ cd ..
463
463
464 $ cat << EOF >> $HGRCPATH
464 $ cat << EOF >> $HGRCPATH
465 > [extensions]
465 > [extensions]
466 > failpush=$TESTTMP/failpush.py
466 > failpush=$TESTTMP/failpush.py
467 > EOF
467 > EOF
468
468
469 $ killdaemons.py
469 $ killdaemons.py
470 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
470 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
471 $ cat other.pid >> $DAEMON_PIDS
471 $ cat other.pid >> $DAEMON_PIDS
472
472
473 Doing the actual push: Abort error
473 Doing the actual push: Abort error
474
474
475 $ cat << EOF >> $HGRCPATH
475 $ cat << EOF >> $HGRCPATH
476 > [failpush]
476 > [failpush]
477 > reason = abort
477 > reason = abort
478 > EOF
478 > EOF
479
479
480 $ hg -R main push other -r e7ec4e813ba6
480 $ hg -R main push other -r e7ec4e813ba6
481 pushing to other
481 pushing to other
482 searching for changes
482 searching for changes
483 abort: Abandon ship!
483 abort: Abandon ship!
484 (don't panic)
484 (don't panic)
485 [255]
485 [255]
486
486
487 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
487 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
488 pushing to ssh://user@dummy/other
488 pushing to ssh://user@dummy/other
489 searching for changes
489 searching for changes
490 abort: Abandon ship!
490 abort: Abandon ship!
491 (don't panic)
491 (don't panic)
492 [255]
492 [255]
493
493
494 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
494 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
495 pushing to http://localhost:$HGPORT2/
495 pushing to http://localhost:$HGPORT2/
496 searching for changes
496 searching for changes
497 abort: Abandon ship!
497 abort: Abandon ship!
498 (don't panic)
498 (don't panic)
499 [255]
499 [255]
500
500
501
501
502 Doing the actual push: unknown mandatory parts
502 Doing the actual push: unknown mandatory parts
503
503
504 $ cat << EOF >> $HGRCPATH
504 $ cat << EOF >> $HGRCPATH
505 > [failpush]
505 > [failpush]
506 > reason = unknown
506 > reason = unknown
507 > EOF
507 > EOF
508
508
509 $ hg -R main push other -r e7ec4e813ba6
509 $ hg -R main push other -r e7ec4e813ba6
510 pushing to other
510 pushing to other
511 searching for changes
511 searching for changes
512 abort: missing support for test:unknown
512 abort: missing support for test:unknown
513 [255]
513 [255]
514
514
515 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
515 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
516 pushing to ssh://user@dummy/other
516 pushing to ssh://user@dummy/other
517 searching for changes
517 searching for changes
518 abort: missing support for test:unknown
518 abort: missing support for test:unknown
519 [255]
519 [255]
520
520
521 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
521 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
522 pushing to http://localhost:$HGPORT2/
522 pushing to http://localhost:$HGPORT2/
523 searching for changes
523 searching for changes
524 abort: missing support for test:unknown
524 abort: missing support for test:unknown
525 [255]
525 [255]
526
526
527 Doing the actual push: race
527 Doing the actual push: race
528
528
529 $ cat << EOF >> $HGRCPATH
529 $ cat << EOF >> $HGRCPATH
530 > [failpush]
530 > [failpush]
531 > reason = race
531 > reason = race
532 > EOF
532 > EOF
533
533
534 $ hg -R main push other -r e7ec4e813ba6
534 $ hg -R main push other -r e7ec4e813ba6
535 pushing to other
535 pushing to other
536 searching for changes
536 searching for changes
537 abort: push failed:
537 abort: push failed:
538 'repository changed while pushing - please try again'
538 'repository changed while pushing - please try again'
539 [255]
539 [255]
540
540
541 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
541 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
542 pushing to ssh://user@dummy/other
542 pushing to ssh://user@dummy/other
543 searching for changes
543 searching for changes
544 abort: push failed:
544 abort: push failed:
545 'repository changed while pushing - please try again'
545 'repository changed while pushing - please try again'
546 [255]
546 [255]
547
547
548 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
548 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
549 pushing to http://localhost:$HGPORT2/
549 pushing to http://localhost:$HGPORT2/
550 searching for changes
550 searching for changes
551 abort: push failed:
551 abort: push failed:
552 'repository changed while pushing - please try again'
552 'repository changed while pushing - please try again'
553 [255]
553 [255]
554
554
555 Doing the actual push: hook abort
555 Doing the actual push: hook abort
556
556
557 $ cat << EOF >> $HGRCPATH
557 $ cat << EOF >> $HGRCPATH
558 > [failpush]
558 > [failpush]
559 > reason =
559 > reason =
560 > [hooks]
560 > [hooks]
561 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
561 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
562 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
562 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
563 > EOF
563 > EOF
564
564
565 $ killdaemons.py
565 $ killdaemons.py
566 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
566 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
567 $ cat other.pid >> $DAEMON_PIDS
567 $ cat other.pid >> $DAEMON_PIDS
568
568
569 $ hg -R main push other -r e7ec4e813ba6
569 $ hg -R main push other -r e7ec4e813ba6
570 pushing to other
570 pushing to other
571 searching for changes
571 searching for changes
572 remote: adding changesets
572 remote: adding changesets
573 remote: adding manifests
573 remote: adding manifests
574 remote: adding file changes
574 remote: adding file changes
575 remote: added 1 changesets with 1 changes to 1 files
575 remote: added 1 changesets with 1 changes to 1 files
576 remote: pre-close-tip:e7ec4e813ba6 draft
576 remote: pre-close-tip:e7ec4e813ba6 draft
577 remote: You shall not pass!
577 remote: You shall not pass!
578 remote: transaction abort!
578 remote: transaction abort!
579 remote: Cleaning up the mess...
579 remote: Cleaning up the mess...
580 remote: rollback completed
580 remote: rollback completed
581 abort: pretxnclose.failpush hook exited with status 1
581 abort: pretxnclose.failpush hook exited with status 1
582 [255]
582 [255]
583
583
584 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
584 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
585 pushing to ssh://user@dummy/other
585 pushing to ssh://user@dummy/other
586 searching for changes
586 searching for changes
587 remote: adding changesets
587 remote: adding changesets
588 remote: adding manifests
588 remote: adding manifests
589 remote: adding file changes
589 remote: adding file changes
590 remote: added 1 changesets with 1 changes to 1 files
590 remote: added 1 changesets with 1 changes to 1 files
591 remote: pre-close-tip:e7ec4e813ba6 draft
591 remote: pre-close-tip:e7ec4e813ba6 draft
592 remote: You shall not pass!
592 remote: You shall not pass!
593 remote: transaction abort!
593 remote: transaction abort!
594 remote: Cleaning up the mess...
594 remote: Cleaning up the mess...
595 remote: rollback completed
595 remote: rollback completed
596 abort: pretxnclose.failpush hook exited with status 1
596 abort: pretxnclose.failpush hook exited with status 1
597 [255]
597 [255]
598
598
599 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
599 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
600 pushing to http://localhost:$HGPORT2/
600 pushing to http://localhost:$HGPORT2/
601 searching for changes
601 searching for changes
602 remote: adding changesets
602 remote: adding changesets
603 remote: adding manifests
603 remote: adding manifests
604 remote: adding file changes
604 remote: adding file changes
605 remote: added 1 changesets with 1 changes to 1 files
605 remote: added 1 changesets with 1 changes to 1 files
606 remote: pre-close-tip:e7ec4e813ba6 draft
606 remote: pre-close-tip:e7ec4e813ba6 draft
607 remote: You shall not pass!
607 remote: You shall not pass!
608 remote: transaction abort!
608 remote: transaction abort!
609 remote: Cleaning up the mess...
609 remote: Cleaning up the mess...
610 remote: rollback completed
610 remote: rollback completed
611 abort: pretxnclose.failpush hook exited with status 1
611 abort: pretxnclose.failpush hook exited with status 1
612 [255]
612 [255]
613
613
614 (check that no 'pending' files remain)
614 (check that no 'pending' files remain)
615
615
616 $ ls -1 other/.hg/bookmarks*
616 $ ls -1 other/.hg/bookmarks*
617 other/.hg/bookmarks
617 other/.hg/bookmarks
618 $ ls -1 other/.hg/store/phaseroots*
618 $ ls -1 other/.hg/store/phaseroots*
619 other/.hg/store/phaseroots
619 other/.hg/store/phaseroots
620 $ ls -1 other/.hg/store/00changelog.i*
620 $ ls -1 other/.hg/store/00changelog.i*
621 other/.hg/store/00changelog.i
621 other/.hg/store/00changelog.i
622
622
623 Check error from hook during the unbundling process itself
623 Check error from hook during the unbundling process itself
624
624
625 $ cat << EOF >> $HGRCPATH
625 $ cat << EOF >> $HGRCPATH
626 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
626 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
627 > EOF
627 > EOF
628 $ killdaemons.py # reload http config
628 $ killdaemons.py # reload http config
629 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
629 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
630 $ cat other.pid >> $DAEMON_PIDS
630 $ cat other.pid >> $DAEMON_PIDS
631
631
632 $ hg -R main push other -r e7ec4e813ba6
632 $ hg -R main push other -r e7ec4e813ba6
633 pushing to other
633 pushing to other
634 searching for changes
634 searching for changes
635 remote: adding changesets
635 remote: adding changesets
636 remote: adding manifests
636 remote: adding manifests
637 remote: adding file changes
637 remote: adding file changes
638 remote: added 1 changesets with 1 changes to 1 files
638 remote: added 1 changesets with 1 changes to 1 files
639 remote: Fail early!
639 remote: Fail early!
640 remote: transaction abort!
640 remote: transaction abort!
641 remote: Cleaning up the mess...
641 remote: Cleaning up the mess...
642 remote: rollback completed
642 remote: rollback completed
643 abort: pretxnchangegroup hook exited with status 1
643 abort: pretxnchangegroup hook exited with status 1
644 [255]
644 [255]
645 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
645 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
646 pushing to ssh://user@dummy/other
646 pushing to ssh://user@dummy/other
647 searching for changes
647 searching for changes
648 remote: adding changesets
648 remote: adding changesets
649 remote: adding manifests
649 remote: adding manifests
650 remote: adding file changes
650 remote: adding file changes
651 remote: added 1 changesets with 1 changes to 1 files
651 remote: added 1 changesets with 1 changes to 1 files
652 remote: Fail early!
652 remote: Fail early!
653 remote: transaction abort!
653 remote: transaction abort!
654 remote: Cleaning up the mess...
654 remote: Cleaning up the mess...
655 remote: rollback completed
655 remote: rollback completed
656 abort: pretxnchangegroup hook exited with status 1
656 abort: pretxnchangegroup hook exited with status 1
657 [255]
657 [255]
658 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
658 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
659 pushing to http://localhost:$HGPORT2/
659 pushing to http://localhost:$HGPORT2/
660 searching for changes
660 searching for changes
661 remote: adding changesets
661 remote: adding changesets
662 remote: adding manifests
662 remote: adding manifests
663 remote: adding file changes
663 remote: adding file changes
664 remote: added 1 changesets with 1 changes to 1 files
664 remote: added 1 changesets with 1 changes to 1 files
665 remote: Fail early!
665 remote: Fail early!
666 remote: transaction abort!
666 remote: transaction abort!
667 remote: Cleaning up the mess...
667 remote: Cleaning up the mess...
668 remote: rollback completed
668 remote: rollback completed
669 abort: pretxnchangegroup hook exited with status 1
669 abort: pretxnchangegroup hook exited with status 1
670 [255]
670 [255]
671
671
672 Check output capture control.
672 Check output capture control.
673
673
674 (should be still forced for http, disabled for local and ssh)
674 (should be still forced for http, disabled for local and ssh)
675
675
676 $ cat >> $HGRCPATH << EOF
676 $ cat >> $HGRCPATH << EOF
677 > [experimental]
677 > [experimental]
678 > bundle2-output-capture=False
678 > bundle2-output-capture=False
679 > EOF
679 > EOF
680
680
681 $ hg -R main push other -r e7ec4e813ba6
681 $ hg -R main push other -r e7ec4e813ba6
682 pushing to other
682 pushing to other
683 searching for changes
683 searching for changes
684 adding changesets
684 adding changesets
685 adding manifests
685 adding manifests
686 adding file changes
686 adding file changes
687 added 1 changesets with 1 changes to 1 files
687 added 1 changesets with 1 changes to 1 files
688 Fail early!
688 Fail early!
689 transaction abort!
689 transaction abort!
690 Cleaning up the mess...
690 Cleaning up the mess...
691 rollback completed
691 rollback completed
692 abort: pretxnchangegroup hook exited with status 1
692 abort: pretxnchangegroup hook exited with status 1
693 [255]
693 [255]
694 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
694 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
695 pushing to ssh://user@dummy/other
695 pushing to ssh://user@dummy/other
696 searching for changes
696 searching for changes
697 remote: adding changesets
697 remote: adding changesets
698 remote: adding manifests
698 remote: adding manifests
699 remote: adding file changes
699 remote: adding file changes
700 remote: added 1 changesets with 1 changes to 1 files
700 remote: added 1 changesets with 1 changes to 1 files
701 remote: Fail early!
701 remote: Fail early!
702 remote: transaction abort!
702 remote: transaction abort!
703 remote: Cleaning up the mess...
703 remote: Cleaning up the mess...
704 remote: rollback completed
704 remote: rollback completed
705 abort: pretxnchangegroup hook exited with status 1
705 abort: pretxnchangegroup hook exited with status 1
706 [255]
706 [255]
707 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
707 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
708 pushing to http://localhost:$HGPORT2/
708 pushing to http://localhost:$HGPORT2/
709 searching for changes
709 searching for changes
710 remote: adding changesets
710 remote: adding changesets
711 remote: adding manifests
711 remote: adding manifests
712 remote: adding file changes
712 remote: adding file changes
713 remote: added 1 changesets with 1 changes to 1 files
713 remote: added 1 changesets with 1 changes to 1 files
714 remote: Fail early!
714 remote: Fail early!
715 remote: transaction abort!
715 remote: transaction abort!
716 remote: Cleaning up the mess...
716 remote: Cleaning up the mess...
717 remote: rollback completed
717 remote: rollback completed
718 abort: pretxnchangegroup hook exited with status 1
718 abort: pretxnchangegroup hook exited with status 1
719 [255]
719 [255]
720
720
721 Check abort from mandatory pushkey
721 Check abort from mandatory pushkey
722
722
723 $ cat > mandatorypart.py << EOF
723 $ cat > mandatorypart.py << EOF
724 > from mercurial import exchange
724 > from mercurial import exchange
725 > from mercurial import pushkey
725 > from mercurial import pushkey
726 > from mercurial import node
726 > from mercurial import node
727 > from mercurial import error
727 > @exchange.b2partsgenerator('failingpuskey')
728 > @exchange.b2partsgenerator('failingpuskey')
728 > def addfailingpushey(pushop, bundler):
729 > def addfailingpushey(pushop, bundler):
729 > enc = pushkey.encode
730 > enc = pushkey.encode
730 > part = bundler.newpart('pushkey')
731 > part = bundler.newpart('pushkey')
731 > part.addparam('namespace', enc('phases'))
732 > part.addparam('namespace', enc('phases'))
732 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
733 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
733 > part.addparam('old', enc(str(0))) # successful update
734 > part.addparam('old', enc(str(0))) # successful update
734 > part.addparam('new', enc(str(0)))
735 > part.addparam('new', enc(str(0)))
736 > def fail(pushop, exc):
737 > raise error.Abort('Correct phase push failed (because hooks)')
738 > pushop.pkfailcb[part.id] = fail
735 > EOF
739 > EOF
736 $ cat >> $HGRCPATH << EOF
740 $ cat >> $HGRCPATH << EOF
737 > [hooks]
741 > [hooks]
738 > pretxnchangegroup=
742 > pretxnchangegroup=
739 > pretxnclose.failpush=
743 > pretxnclose.failpush=
740 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
744 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
741 > [extensions]
745 > [extensions]
742 > mandatorypart=$TESTTMP/mandatorypart.py
746 > mandatorypart=$TESTTMP/mandatorypart.py
743 > EOF
747 > EOF
744 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
748 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
745 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
749 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
746 $ cat other.pid >> $DAEMON_PIDS
750 $ cat other.pid >> $DAEMON_PIDS
747
751
748 (Failure from a hook)
752 (Failure from a hook)
749
753
750 $ hg -R main push other -r e7ec4e813ba6
754 $ hg -R main push other -r e7ec4e813ba6
751 pushing to other
755 pushing to other
752 searching for changes
756 searching for changes
753 adding changesets
757 adding changesets
754 adding manifests
758 adding manifests
755 adding file changes
759 adding file changes
756 added 1 changesets with 1 changes to 1 files
760 added 1 changesets with 1 changes to 1 files
757 do not push the key !
761 do not push the key !
758 pushkey-abort: prepushkey.failpush hook exited with status 1
762 pushkey-abort: prepushkey.failpush hook exited with status 1
759 transaction abort!
763 transaction abort!
760 Cleaning up the mess...
764 Cleaning up the mess...
761 rollback completed
765 rollback completed
762 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
766 abort: Correct phase push failed (because hooks)
763 [255]
767 [255]
764 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
768 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
765 pushing to ssh://user@dummy/other
769 pushing to ssh://user@dummy/other
766 searching for changes
770 searching for changes
767 remote: adding changesets
771 remote: adding changesets
768 remote: adding manifests
772 remote: adding manifests
769 remote: adding file changes
773 remote: adding file changes
770 remote: added 1 changesets with 1 changes to 1 files
774 remote: added 1 changesets with 1 changes to 1 files
771 remote: do not push the key !
775 remote: do not push the key !
772 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
776 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
773 remote: transaction abort!
777 remote: transaction abort!
774 remote: Cleaning up the mess...
778 remote: Cleaning up the mess...
775 remote: rollback completed
779 remote: rollback completed
776 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
780 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
777 [255]
781 [255]
778 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
782 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
779 pushing to http://localhost:$HGPORT2/
783 pushing to http://localhost:$HGPORT2/
780 searching for changes
784 searching for changes
781 remote: adding changesets
785 remote: adding changesets
782 remote: adding manifests
786 remote: adding manifests
783 remote: adding file changes
787 remote: adding file changes
784 remote: added 1 changesets with 1 changes to 1 files
788 remote: added 1 changesets with 1 changes to 1 files
785 remote: do not push the key !
789 remote: do not push the key !
786 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
790 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
787 remote: transaction abort!
791 remote: transaction abort!
788 remote: Cleaning up the mess...
792 remote: Cleaning up the mess...
789 remote: rollback completed
793 remote: rollback completed
790 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
794 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
791 [255]
795 [255]
792
796
793 (Failure from the pushkey)
797 (Failure from the pushkey)
794
798
795 $ cat > mandatorypart.py << EOF
799 $ cat > mandatorypart.py << EOF
796 > from mercurial import exchange
800 > from mercurial import exchange
797 > from mercurial import pushkey
801 > from mercurial import pushkey
798 > from mercurial import node
802 > from mercurial import node
803 > from mercurial import error
799 > @exchange.b2partsgenerator('failingpuskey')
804 > @exchange.b2partsgenerator('failingpuskey')
800 > def addfailingpushey(pushop, bundler):
805 > def addfailingpushey(pushop, bundler):
801 > enc = pushkey.encode
806 > enc = pushkey.encode
802 > part = bundler.newpart('pushkey')
807 > part = bundler.newpart('pushkey')
803 > part.addparam('namespace', enc('phases'))
808 > part.addparam('namespace', enc('phases'))
804 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
809 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
805 > part.addparam('old', enc(str(4))) # will fail
810 > part.addparam('old', enc(str(4))) # will fail
806 > part.addparam('new', enc(str(3)))
811 > part.addparam('new', enc(str(3)))
812 > def fail(pushop, exc):
813 > raise error.Abort('Clown phase push failed')
814 > pushop.pkfailcb[part.id] = fail
807 > EOF
815 > EOF
808 $ cat >> $HGRCPATH << EOF
816 $ cat >> $HGRCPATH << EOF
809 > [hooks]
817 > [hooks]
810 > prepushkey.failpush =
818 > prepushkey.failpush =
811 > EOF
819 > EOF
812 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
820 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
813 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
821 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
814 $ cat other.pid >> $DAEMON_PIDS
822 $ cat other.pid >> $DAEMON_PIDS
815
823
816 $ hg -R main push other -r e7ec4e813ba6
824 $ hg -R main push other -r e7ec4e813ba6
817 pushing to other
825 pushing to other
818 searching for changes
826 searching for changes
819 adding changesets
827 adding changesets
820 adding manifests
828 adding manifests
821 adding file changes
829 adding file changes
822 added 1 changesets with 1 changes to 1 files
830 added 1 changesets with 1 changes to 1 files
823 transaction abort!
831 transaction abort!
824 Cleaning up the mess...
832 Cleaning up the mess...
825 rollback completed
833 rollback completed
826 pushkey: lock state after "phases"
834 pushkey: lock state after "phases"
827 lock: free
835 lock: free
828 wlock: free
836 wlock: free
829 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
837 abort: Clown phase push failed
830 [255]
838 [255]
831 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
839 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
832 pushing to ssh://user@dummy/other
840 pushing to ssh://user@dummy/other
833 searching for changes
841 searching for changes
834 remote: adding changesets
842 remote: adding changesets
835 remote: adding manifests
843 remote: adding manifests
836 remote: adding file changes
844 remote: adding file changes
837 remote: added 1 changesets with 1 changes to 1 files
845 remote: added 1 changesets with 1 changes to 1 files
838 remote: transaction abort!
846 remote: transaction abort!
839 remote: Cleaning up the mess...
847 remote: Cleaning up the mess...
840 remote: rollback completed
848 remote: rollback completed
841 remote: pushkey: lock state after "phases"
849 remote: pushkey: lock state after "phases"
842 remote: lock: free
850 remote: lock: free
843 remote: wlock: free
851 remote: wlock: free
844 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
852 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
845 [255]
853 [255]
846 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
854 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
847 pushing to http://localhost:$HGPORT2/
855 pushing to http://localhost:$HGPORT2/
848 searching for changes
856 searching for changes
849 remote: adding changesets
857 remote: adding changesets
850 remote: adding manifests
858 remote: adding manifests
851 remote: adding file changes
859 remote: adding file changes
852 remote: added 1 changesets with 1 changes to 1 files
860 remote: added 1 changesets with 1 changes to 1 files
853 remote: transaction abort!
861 remote: transaction abort!
854 remote: Cleaning up the mess...
862 remote: Cleaning up the mess...
855 remote: rollback completed
863 remote: rollback completed
856 remote: pushkey: lock state after "phases"
864 remote: pushkey: lock state after "phases"
857 remote: lock: free
865 remote: lock: free
858 remote: wlock: free
866 remote: wlock: free
859 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
867 abort: failed to update value for "phases/cd010b8cd998f3981a5a8115f94f8da4ab506089"
860 [255]
868 [255]
861
869
General Comments 0
You need to be logged in to leave comments. Login now