##// END OF EJS Templates
bundle2: allow lazily acquiring the lock...
Durham Goode -
r26566:58880acd default
parent child Browse files
Show More
@@ -1,1490 +1,1501 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13 import lock as lockmod
13 import lock as lockmod
14 import streamclone
14 import streamclone
15 import tags
15 import tags
16
16
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of stream *fh* and return a matching unbundler.

    *fname* is the bundle's name, used in error messages; *vfs*, when
    given, is used to resolve *fname* to a full path. Raises util.Abort
    for non-Mercurial data or an unsupported bundle version.
    """
    header = changegroup.readexactly(fh, 4)

    compression = None
    if not fname:
        fname = "stream"
        # A headerless stream starting with NUL is a bare, uncompressed
        # changegroup: re-attach the bytes we consumed and treat as HG10.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            compression = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        # HG10 carries a 2-byte compression code unless already known.
        if compression is None:
            compression = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compression)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
42
42
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if not markers:
        return None
    # Pick the newest obsmarker format both sides understand.
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler do not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('obsmarkers', data=stream)
57
57
58 def _canusebundle2(op):
58 def _canusebundle2(op):
59 """return true if a pull/push can use bundle2
59 """return true if a pull/push can use bundle2
60
60
61 Feel free to nuke this function when we drop the experimental option"""
61 Feel free to nuke this function when we drop the experimental option"""
62 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
62 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
63 and op.remote.capable('bundle2'))
63 and op.remote.capable('bundle2'))
64
64
65
65
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        # (None until the push attempts to lock; then True/False)
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        # cgresult truthy means the changegroup made it to the remote,
        # so the pushed heads became common; otherwise fall back.
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
177
177
178
178
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
    # For a local destination, refuse early if it lacks requirements
    # (e.g. repo-format features) that this repo depends on.
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if _canusebundle2(pushop) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        # EACCES is the only locking failure we tolerate; anything else
        # (e.g. disk error) must propagate.
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        # A transaction is only opened when we hold the local lock: the
        # bundle2 reply may write to the local repo.
        if pushop.locallocked:
            pushop.trmanager = transactionmanager(repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        # Without unbundle support we will use addchangegroup, which
        # requires locking the remote repository ourselves.
        if not unbundle:
            lock = pushop.remote.lock()
        try:
            # The push proper: discovery, then the (bundle2 or legacy)
            # changeset push, then phase/obsmarker/bookmark sync.
            _pushdiscovery(pushop)
            if _canusebundle2(pushop):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
        if pushop.trmanager:
            pushop.trmanager.close()
    finally:
        # Release in reverse order of acquisition: transaction, then
        # local store lock, then working-directory lock.
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop
259
259
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def register(fn):
        # Refuse double registration of a step name.
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = fn
        pushdiscoveryorder.append(stepname)
        return fn
    return register
283
283
def _pushdiscovery(pushop):
    """Run all discovery steps"""
    # Steps run in registration order; each mutates pushop in place.
    for stepname in pushdiscoveryorder:
        pushdiscoverymapping[stepname](pushop)
289
289
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    # Reuse the incoming discovery result to compute what we must send.
    pushop.outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
302
302
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    # pheads: remote public heads; droots: remote draft roots
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # non-publishing server: only consider what is public there
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        # nothing to push: success and failure cases coincide
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # phases to push on success / on changeset-push failure
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
351
351
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed heads"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
362
362
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """Compare local and remote bookmarks, filling pushop.outbookmarks.

    Each entry is a (name, old-remote-id, new-id) triple; '' stands for
    "absent". Sets pushop.bkresult = 2 when an explicitly requested
    bookmark exists on neither side.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict implicit bookmark moves to the pushed subset
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks the user named on the command line; pruned as matched
    explicit = set(pushop.bookmarks)

    comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
    # locally-advanced bookmarks: push if within the pushed subset
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in advdst + diverge + differ:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
413
413
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set before pushing.

    Returns False when there is nothing to push; raises util.Abort when
    the push would publish obsolete/troubled changesets or create
    unwanted new heads (unless --force).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if pushop.force:
        return True
    # if repo.obsstore == False --> no obsolete
    # then, save the iteration
    if unfi.obsstore:
        # this message are here for 80 char limit reason
        mso = _("push includes obsolete changeset: %s!")
        mst = {"unstable": _("push includes unstable changeset: %s!"),
               "bumped": _("push includes bumped changeset: %s!"),
               "divergent": _("push includes divergent changeset: %s!")}
        # If we are to push if there is at least one
        # obsolete or unstable changeset in missing, at
        # least one of the missinghead will be obsolete or
        # unstable. So checking heads only is ok
        for node in outgoing.missingheads:
            ctx = unfi[node]
            if ctx.obsolete():
                raise util.Abort(mso % ctx)
            if ctx.troubled():
                raise util.Abort(mst[ctx.troubles()[0]] % ctx)

    # internal config: bookmarks.pushing
    newbm = pushop.ui.configlist('bookmarks', 'pushing')
    discovery.checkheads(unfi, pushop.remote, outgoing,
                         pushop.remoteheads,
                         pushop.newbranch,
                         bool(pushop.incoming),
                         newbm)
    return True
450
450
# List of names of steps to perform for an outgoing bundle2, order matters.
# Populated by the b2partsgenerator decorator below.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
458
458
def b2partsgenerator(stepname, idx=None):
    """decorator registering a function as a bundle2 part generator

    The decorated function is recorded in the step -> function mapping and
    its step name is inserted into the ordered list of steps, at position
    ``idx`` when given, at the end otherwise. Beware that decorated
    functions will be added in order (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def register(func):
        # refuse to silently replace an already registered step
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is not None:
            b2partsgenorder.insert(idx, stepname)
        else:
            b2partsgenorder.append(stepname)
        return func
    return register
477
477
478 def _pushb2ctxcheckheads(pushop, bundler):
478 def _pushb2ctxcheckheads(pushop, bundler):
479 """Generate race condition checking parts
479 """Generate race condition checking parts
480
480
481 Exists as an indepedent function to aid extensions
481 Exists as an indepedent function to aid extensions
482 """
482 """
483 if not pushop.force:
483 if not pushop.force:
484 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
484 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
485
485
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    Adds a 'changegroup' part carrying the outgoing changesets (plus a
    'check:heads' race-detection part unless the push is forced).

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = None
    cgversions = b2caps.get('changegroup')
    if not cgversions: # 3.1 and 3.2 ship with an empty value
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing)
    else:
        cgversions = [v for v in cgversions if v in changegroup.packermap]
        if not cgversions:
            # user-facing failure: report through Abort like every other
            # push error, instead of leaking a bare ValueError traceback
            raise util.Abort(_('no common changegroup version'))
        version = max(cgversions)
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing,
                                                version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if version is not None:
        cgpart.addparam('version', version)
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
527
527
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Emits one 'pushkey' part per outdated remote head that must be turned
    public, and registers failure callbacks so a PushkeyFailed reply can be
    traced back to the changeset whose phase update was rejected.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # idiomatic 'not in' (was 'not ... in'), consistent with _pushb2bookmarks
    if 'pushkey' not in b2caps:
        # do not mark the step done: the plain pushkey protocol will be used
        # as a fallback later in the push
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs so replies/failures can be matched to changesets
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """warn about every phase update the server ignored or rejected"""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
568
568
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the outgoing bundle2"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    # only emit the part when both sides share a markers format; otherwise
    # leave the step undone so the pushkey-based fallback can handle it
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
580
580
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # without pushkey support the step stays undone so the legacy pushkey
    # protocol can be used as a fallback later in the push
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples used to match server replies
    # and failures back to the bookmark they concern
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # turn a PushkeyFailed reply for one of our parts into an Abort
        # carrying the failure message appropriate for that bookmark action
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for a part we did not generate
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty old value -> new bookmark; empty new value -> deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # report per-bookmark success/failure from the server's replies
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
632
632
633
633
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback: allow the server to send parts back in its reply (requires a
    # transaction manager so the returned parts can be applied locally)
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator in order, collecting the reply
    # handlers they return
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (only the mandatory replycaps part)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(stream, ['force'], 'push')
        except error.BundleValueError as exc:
            raise util.Abort('missing support for %s' % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise util.Abort('missing support for %s' % exc)
    except error.PushkeyFailed as exc:
        # dispatch the failure to the callback registered by the part
        # generator that produced the failing part, when one exists
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
676
676
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) changegroup push; stores the server's return value
    in ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                            or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
725
725
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
781
781
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.trmanager:
        # Without a transaction manager the repo is not locked, so no phase
        # may be changed. Inform the user about phases that would have moved.
        wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if wouldmove:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
        return
    phases.advanceboundary(pushop.repo,
                           pushop.trmanager.transaction(),
                           phase,
                           nodes)
798
798
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    failed = []
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        payload = remotedata[key]
        if not remote.pushkey('obsolete', key, '', payload):
            failed.append(key)
    if failed:
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
817
817
818 def _pushbookmark(pushop):
818 def _pushbookmark(pushop):
819 """Update bookmark position on remote"""
819 """Update bookmark position on remote"""
820 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
820 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
821 return
821 return
822 pushop.stepsdone.add('bookmarks')
822 pushop.stepsdone.add('bookmarks')
823 ui = pushop.ui
823 ui = pushop.ui
824 remote = pushop.remote
824 remote = pushop.remote
825
825
826 for b, old, new in pushop.outbookmarks:
826 for b, old, new in pushop.outbookmarks:
827 action = 'update'
827 action = 'update'
828 if not old:
828 if not old:
829 action = 'export'
829 action = 'export'
830 elif not new:
830 elif not new:
831 action = 'delete'
831 action = 'delete'
832 if remote.pushkey('bookmarks', b, old, new):
832 if remote.pushkey('bookmarks', b, old, new):
833 ui.status(bookmsgmap[action][0] % b)
833 ui.status(bookmsgmap[action][0] % b)
834 else:
834 else:
835 ui.warn(bookmsgmap[action][1] % b)
835 ui.warn(bookmsgmap[action][1] % b)
836 # discovery can have set the value form invalid entry
836 # discovery can have set the value form invalid entry
837 if pushop.bkresult is not None:
837 if pushop.bkresult is not None:
838 pushop.bkresult = 1
838 pushop.bkresult = 1
839
839
class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull-related state and very common operations.

    A new object should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        """whether bundle2 may be used for this pull (cached)"""
        return _canusebundle2(self)

    @util.propertycache
    def remotebundle2caps(self):
        """bundle2 capabilities advertised by the remote (cached)"""
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
907
907
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # the transaction itself is created lazily by transaction()
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is None:
            return
        self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is None:
            return
        self._tr.release()
937
937
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        # for a local peer we can check requirements up front instead of
        # failing midway through the operation
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # The lock covers the whole operation; the transaction itself is created
    # lazily by the transactionmanager only when something must be written.
    lock = pullop.repo.lock()
    try:
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # Each step below consults pullop.stepsdone and becomes a no-op if
        # the bundle2 exchange above already handled it.
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        # release the transaction before releasing the lock
        # NOTE(review): assumes pulloperation leaves ``trmanager`` defined
        # even when transactionmanager() itself raised -- confirm, otherwise
        # this finally clause can mask the original exception
        pullop.trmanager.release()
        lock.release()

    return pullop
989
989
# List of names of discovery steps to perform before pull, in execution
# order.  Populated by the @pulldiscovery decorator below.
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
997
997
def pulldiscovery(stepname):
    """decorator registering a discovery step to run before pull

    The decorated function is recorded in the step name -> function mapping
    and its name is appended to the ordered list of steps, so decoration
    order can matter.

    Only use this decorator for brand new steps; to wrap a step from an
    extension, modify the pulldiscovery dictionary directly."""
    def register(func):
        # each step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1013
1013
def _pulldiscovery(pullop):
    """run every registered discovery step, in registration order"""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1019
1019
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in the bundle1 case

    When bundle2 is not in use, bookmarks must be fetched before changeset
    discovery to reduce the chance and the impact of race conditions."""
    alreadyknown = pullop.remotebookmarks is not None
    # all known bundle2 servers now support listkeys, but lets be nice with
    # new implementation.
    viabundle2 = (pullop.canusebundle2
                  and 'listkeys' in pullop.remotebundle2caps)
    if not (alreadyknown or viabundle2):
        pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1033
1033
1034
1034
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; eventually all discovery
    will move here."""
    common, fetch, rheads = discovery.findcommonincoming(pullop.repo,
                                                         pullop.remote,
                                                         heads=pullop.heads,
                                                         force=pullop.force)
    nodemap = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # When a remote head is filtered locally, drop it from the set of
        # unknown remote heads and count it as common instead.
        #
        # This is a hackish way to catch most "common but locally hidden"
        # situations.  Discovery is not run on the unfiltered repository
        # because that ends up costing a pathological number of round trips
        # for a huge amount of changesets we do not care about.
        #
        # If such "common but filtered" changesets exist on the server but
        # none of them is a remote head, we will not detect them.
        knowncommon = set(common)
        unknownheads = []
        for node in rheads:
            if node not in nodemap:
                unknownheads.append(node)
            elif node not in knowncommon:
                common.append(node)
        if not unknownheads:
            fetch = []
        rheads = unknownheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1072
1072
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup.

    Builds the getbundle arguments (changegroup, listkeys namespaces,
    obsolescence markers), fetches one bundle2 from the remote, processes it
    and applies the phase/bookmark replies it carried."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    streaming, streamreqs = streamclone.canperformstreamclone(pullop)

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        # Request the 'phases' pushkey namespace so phase data rides along
        # in the same response.  The spelling must match the reply handling
        # below and _pullphase ('phases'); the previous 'phase' request
        # could never match a reply record, forcing a separate listkeys
        # round trip for phase data.
        kwargs['listkeys'] = ['phases']
        if pullop.remotebookmarks is None:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            kwargs['listkeys'].append('bookmarks')
    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        # only request obsolescence markers when both sides speak a common
        # markers format
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    # let extensions amend the request before it is sent
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError as exc:
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        results = [cg['return'] for cg in op.records['changegroup']]
        pullop.cgresult = changegroup.combineresults(results)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1130
1130
1131 def _pullbundle2extraprepare(pullop, kwargs):
1131 def _pullbundle2extraprepare(pullop, kwargs):
1132 """hook function so that extensions can extend the getbundle call"""
1132 """hook function so that extensions can extend the getbundle call"""
1133 pass
1133 pass
1134
1134
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        # already handled, e.g. by the bundle2 code path
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        # discovery found nothing to pull
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # Pick the richest protocol the remote supports: getbundle first, then
    # plain changegroup (full pulls only), then changegroupsubset; the order
    # of this chain matters.
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
1168
1168
def _pullphase(pullop):
    """fetch phase information from the remote and apply it locally"""
    if 'phases' not in pullop.stepsdone:
        # dedicated listkeys round trip for phase data
        _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1175
1175
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and unpublishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing: all common changesets should be seen
        # as public.
        pheads = pullop.pulledsubset
        dheads = []
    unfiltered = pullop.repo.unfiltered()
    getphase = unfiltered._phasecache.phase
    getrev = unfiltered.changelog.nodemap.get

    # advance the public boundary first, then the draft one, skipping
    # changesets already at (or below) the target phase locally
    for targetphase, candidates in ((phases.public, pheads),
                                    (phases.draft, dheads)):
        outdated = [node for node in candidates
                    if getphase(unfiltered, getrev(node)) > targetphase]
        if outdated:
            tr = pullop.gettransaction()
            phases.advanceboundary(pullop.repo, tr, targetphase, outdated)
1210
1210
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    pullop.repo.ui  # keep ui lookup on the pulled repo explicit below
    bookmod.updatefromremote(pullop.repo.ui, pullop.repo,
                             pullop.remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1222
1222
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` function returns the pull transaction, creating one
    if necessary.  The transaction is returned to let the caller know whether
    a new one was created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if not obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        return tr
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    if 'dump0' in remoteobs:
        tr = pullop.gettransaction()
        # merge every dump* key, newest first
        dumpkeys = [key for key in sorted(remoteobs, reverse=True)
                    if key.startswith('dump')]
        for key in dumpkeys:
            pullop.repo.obsstore.mergemarkers(
                tr, base85.b85decode(remoteobs[key]))
        pullop.repo.invalidatevolatilesets()
    return tr
1246
1246
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return set(['HG20', 'bundle2=' + urllib.quote(capsblob)])
1253
1253
# List of names of steps to perform for a bundle2 for getbundle, order
# matters.  Populated by the @getbundle2partsgenerator decorator below.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1261
1261
def getbundle2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generator for getbundle

    The decorated function is recorded in the step name -> function mapping
    and, by default, appended to the ordered list of steps (so decoration
    order can matter); passing ``idx`` inserts it at that position instead.

    Only use this decorator for new steps; to wrap a step from an extension,
    modify the getbundle2partsmapping dictionary directly."""
    def register(func):
        # each step name may only be registered once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return register
1280
1280
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    changes when more part type will be available for bundle2.

    This is different from changegroup.getchangegroup that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we want to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # bundle10 case
    usebundle2 = False
    if bundlecaps is not None:
        # the client advertises bundle2 by sending an 'HG2X' style cap
        usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps))
    if not usebundle2:
        # bundle10 can only carry a changegroup, so any other request
        # argument is an error
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return changegroup.getchangegroup(repo, source, heads=heads,
                                          common=common, bundlecaps=bundlecaps)

    # bundle20 case
    # decode the bundle2 capabilities the client advertised
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # run every registered part generator, in order, letting each add its
    # part(s) to the bundle
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **kwargs)

    return util.chunkbuffer(bundler.getchunks())
1327
1327
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True):
        return
    # build changegroup bundle here.
    version = None
    getcgkwargs = {}
    supported = b2caps.get('changegroup')
    if supported:  # 3.1 and 3.2 ship with an empty value
        usable = [v for v in supported if v in changegroup.packermap]
        if not usable:
            raise ValueError(_('no common changegroup version'))
        version = getcgkwargs['version'] = max(usable)
    outgoing = changegroup.computeoutgoing(repo, heads, common)
    cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                            bundlecaps=bundlecaps,
                                            **getcgkwargs)
    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if version is not None:
            part.addparam('version', version)
        part.addparam('nbchanges', str(len(outgoing.missing)),
                      mandatory=False)
1353
1353
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    for namespace in kwargs.get('listkeys', ()):
        # one part per requested pushkey namespace
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1364
1364
1365 @getbundle2partsgenerator('obsmarkers')
1365 @getbundle2partsgenerator('obsmarkers')
1366 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1366 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1367 b2caps=None, heads=None, **kwargs):
1367 b2caps=None, heads=None, **kwargs):
1368 """add an obsolescence markers part to the requested bundle"""
1368 """add an obsolescence markers part to the requested bundle"""
1369 if kwargs.get('obsmarkers', False):
1369 if kwargs.get('obsmarkers', False):
1370 if heads is None:
1370 if heads is None:
1371 heads = repo.heads()
1371 heads = repo.heads()
1372 subset = [c.node() for c in repo.set('::%ln', heads)]
1372 subset = [c.node() for c in repo.set('::%ln', heads)]
1373 markers = repo.obsstore.relevantmarkers(subset)
1373 markers = repo.obsstore.relevantmarkers(subset)
1374 markers = sorted(markers)
1374 markers = sorted(markers)
1375 buildobsmarkerspart(bundler, markers)
1375 buildobsmarkerspart(bundler, markers)
1376
1376
1377 @getbundle2partsgenerator('hgtagsfnodes')
1377 @getbundle2partsgenerator('hgtagsfnodes')
1378 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1378 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1379 b2caps=None, heads=None, common=None,
1379 b2caps=None, heads=None, common=None,
1380 **kwargs):
1380 **kwargs):
1381 """Transfer the .hgtags filenodes mapping.
1381 """Transfer the .hgtags filenodes mapping.
1382
1382
1383 Only values for heads in this bundle will be transferred.
1383 Only values for heads in this bundle will be transferred.
1384
1384
1385 The part data consists of pairs of 20 byte changeset node and .hgtags
1385 The part data consists of pairs of 20 byte changeset node and .hgtags
1386 filenodes raw values.
1386 filenodes raw values.
1387 """
1387 """
1388 # Don't send unless:
1388 # Don't send unless:
1389 # - changeset are being exchanged,
1389 # - changeset are being exchanged,
1390 # - the client supports it.
1390 # - the client supports it.
1391 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1391 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1392 return
1392 return
1393
1393
1394 outgoing = changegroup.computeoutgoing(repo, heads, common)
1394 outgoing = changegroup.computeoutgoing(repo, heads, common)
1395
1395
1396 if not outgoing.missingheads:
1396 if not outgoing.missingheads:
1397 return
1397 return
1398
1398
1399 cache = tags.hgtagsfnodescache(repo.unfiltered())
1399 cache = tags.hgtagsfnodescache(repo.unfiltered())
1400 chunks = []
1400 chunks = []
1401
1401
1402 # .hgtags fnodes are only relevant for head changesets. While we could
1402 # .hgtags fnodes are only relevant for head changesets. While we could
1403 # transfer values for all known nodes, there will likely be little to
1403 # transfer values for all known nodes, there will likely be little to
1404 # no benefit.
1404 # no benefit.
1405 #
1405 #
1406 # We don't bother using a generator to produce output data because
1406 # We don't bother using a generator to produce output data because
1407 # a) we only have 40 bytes per head and even esoteric numbers of heads
1407 # a) we only have 40 bytes per head and even esoteric numbers of heads
1408 # consume little memory (1M heads is 40MB) b) we don't want to send the
1408 # consume little memory (1M heads is 40MB) b) we don't want to send the
1409 # part if we don't have entries and knowing if we have entries requires
1409 # part if we don't have entries and knowing if we have entries requires
1410 # cache lookups.
1410 # cache lookups.
1411 for node in outgoing.missingheads:
1411 for node in outgoing.missingheads:
1412 # Don't compute missing, as this may slow down serving.
1412 # Don't compute missing, as this may slow down serving.
1413 fnode = cache.getfnode(node, computemissing=False)
1413 fnode = cache.getfnode(node, computemissing=False)
1414 if fnode is not None:
1414 if fnode is not None:
1415 chunks.extend([node, fnode])
1415 chunks.extend([node, fnode])
1416
1416
1417 if chunks:
1417 if chunks:
1418 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1418 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1419
1419
1420 def check_heads(repo, their_heads, context):
1420 def check_heads(repo, their_heads, context):
1421 """check if the heads of a repo have been modified
1421 """check if the heads of a repo have been modified
1422
1422
1423 Used by peer for unbundling.
1423 Used by peer for unbundling.
1424 """
1424 """
1425 heads = repo.heads()
1425 heads = repo.heads()
1426 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1426 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1427 if not (their_heads == ['force'] or their_heads == heads or
1427 if not (their_heads == ['force'] or their_heads == heads or
1428 their_heads == ['hashed', heads_hash]):
1428 their_heads == ['hashed', heads_hash]):
1429 # someone else committed/pushed/unbundled while we
1429 # someone else committed/pushed/unbundled while we
1430 # were transferring data
1430 # were transferring data
1431 raise error.PushRaced('repository changed while %s - '
1431 raise error.PushRaced('repository changed while %s - '
1432 'please try again' % context)
1432 'please try again' % context)
1433
1433
1434 def unbundle(repo, cg, heads, source, url):
1434 def unbundle(repo, cg, heads, source, url):
1435 """Apply a bundle to a repo.
1435 """Apply a bundle to a repo.
1436
1436
1437 this function makes sure the repo is locked during the application and have
1437 this function makes sure the repo is locked during the application and have
1438 mechanism to check that no push race occurred between the creation of the
1438 mechanism to check that no push race occurred between the creation of the
1439 bundle and its application.
1439 bundle and its application.
1440
1440
1441 If the push was raced as PushRaced exception is raised."""
1441 If the push was raced as PushRaced exception is raised."""
1442 r = 0
1442 r = 0
1443 # need a transaction when processing a bundle2 stream
1443 # need a transaction when processing a bundle2 stream
1444 wlock = lock = tr = None
1444 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1445 lockandtr = [None, None, None]
1445 recordout = None
1446 recordout = None
1446 # quick fix for output mismatch with bundle2 in 3.4
1447 # quick fix for output mismatch with bundle2 in 3.4
1447 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1448 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1448 False)
1449 False)
1449 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1450 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1450 captureoutput = True
1451 captureoutput = True
1451 try:
1452 try:
1452 check_heads(repo, heads, 'uploading changes')
1453 check_heads(repo, heads, 'uploading changes')
1453 # push can proceed
1454 # push can proceed
1454 if util.safehasattr(cg, 'params'):
1455 if util.safehasattr(cg, 'params'):
1455 r = None
1456 r = None
1456 try:
1457 try:
1457 wlock = repo.wlock()
1458 def gettransaction():
1458 lock = repo.lock()
1459 if not lockandtr[2]:
1459 tr = repo.transaction(source)
1460 lockandtr[0] = repo.wlock()
1460 tr.hookargs['source'] = source
1461 lockandtr[1] = repo.lock()
1461 tr.hookargs['url'] = url
1462 lockandtr[2] = repo.transaction(source)
1462 tr.hookargs['bundle2'] = '1'
1463 lockandtr[2].hookargs['source'] = source
1463 op = bundle2.bundleoperation(repo, lambda: tr,
1464 lockandtr[2].hookargs['url'] = url
1465 lockandtr[2].hookargs['bundle2'] = '1'
1466 return lockandtr[2]
1467
1468 # Do greedy locking by default until we're satisfied with lazy
1469 # locking.
1470 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1471 gettransaction()
1472
1473 op = bundle2.bundleoperation(repo, gettransaction,
1464 captureoutput=captureoutput)
1474 captureoutput=captureoutput)
1465 try:
1475 try:
1466 op = bundle2.processbundle(repo, cg, op=op)
1476 op = bundle2.processbundle(repo, cg, op=op)
1467 finally:
1477 finally:
1468 r = op.reply
1478 r = op.reply
1469 if captureoutput and r is not None:
1479 if captureoutput and r is not None:
1470 repo.ui.pushbuffer(error=True, subproc=True)
1480 repo.ui.pushbuffer(error=True, subproc=True)
1471 def recordout(output):
1481 def recordout(output):
1472 r.newpart('output', data=output, mandatory=False)
1482 r.newpart('output', data=output, mandatory=False)
1473 tr.close()
1483 if lockandtr[2] is not None:
1484 lockandtr[2].close()
1474 except BaseException as exc:
1485 except BaseException as exc:
1475 exc.duringunbundle2 = True
1486 exc.duringunbundle2 = True
1476 if captureoutput and r is not None:
1487 if captureoutput and r is not None:
1477 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1488 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1478 def recordout(output):
1489 def recordout(output):
1479 part = bundle2.bundlepart('output', data=output,
1490 part = bundle2.bundlepart('output', data=output,
1480 mandatory=False)
1491 mandatory=False)
1481 parts.append(part)
1492 parts.append(part)
1482 raise
1493 raise
1483 else:
1494 else:
1484 lock = repo.lock()
1495 lockandtr[1] = repo.lock()
1485 r = changegroup.addchangegroup(repo, cg, source, url)
1496 r = changegroup.addchangegroup(repo, cg, source, url)
1486 finally:
1497 finally:
1487 lockmod.release(tr, lock, wlock)
1498 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1488 if recordout is not None:
1499 if recordout is not None:
1489 recordout(repo.ui.popbuffer())
1500 recordout(repo.ui.popbuffer())
1490 return r
1501 return r
@@ -1,899 +1,944 b''
1 Test exchange of common information using bundle2
1 Test exchange of common information using bundle2
2
2
3
3
4 $ getmainid() {
4 $ getmainid() {
5 > hg -R main log --template '{node}\n' --rev "$1"
5 > hg -R main log --template '{node}\n' --rev "$1"
6 > }
6 > }
7
7
8 enable obsolescence
8 enable obsolescence
9
9
10 $ cp $HGRCPATH $TESTTMP/hgrc.orig
10 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
11 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
11 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
12 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
12 > hg debuglock
13 > hg debuglock
13 > EOF
14 > EOF
14
15
15 $ cat >> $HGRCPATH << EOF
16 $ cat >> $HGRCPATH << EOF
16 > [experimental]
17 > [experimental]
17 > evolution=createmarkers,exchange
18 > evolution=createmarkers,exchange
18 > bundle2-exp=True
19 > bundle2-exp=True
19 > bundle2-output-capture=True
20 > bundle2-output-capture=True
20 > [ui]
21 > [ui]
21 > ssh=python "$TESTDIR/dummyssh"
22 > ssh=python "$TESTDIR/dummyssh"
22 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
23 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
23 > [web]
24 > [web]
24 > push_ssl = false
25 > push_ssl = false
25 > allow_push = *
26 > allow_push = *
26 > [phases]
27 > [phases]
27 > publish=False
28 > publish=False
28 > [hooks]
29 > [hooks]
29 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
30 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
30 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
31 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
31 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
32 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
32 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
33 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
33 > EOF
34 > EOF
34
35
35 The extension requires a repo (currently unused)
36 The extension requires a repo (currently unused)
36
37
37 $ hg init main
38 $ hg init main
38 $ cd main
39 $ cd main
39 $ touch a
40 $ touch a
40 $ hg add a
41 $ hg add a
41 $ hg commit -m 'a'
42 $ hg commit -m 'a'
42 pre-close-tip:3903775176ed draft
43 pre-close-tip:3903775176ed draft
43 postclose-tip:3903775176ed draft
44 postclose-tip:3903775176ed draft
44 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
45 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
45
46
46 $ hg unbundle $TESTDIR/bundles/rebase.hg
47 $ hg unbundle $TESTDIR/bundles/rebase.hg
47 adding changesets
48 adding changesets
48 adding manifests
49 adding manifests
49 adding file changes
50 adding file changes
50 added 8 changesets with 7 changes to 7 files (+3 heads)
51 added 8 changesets with 7 changes to 7 files (+3 heads)
51 pre-close-tip:02de42196ebe draft
52 pre-close-tip:02de42196ebe draft
52 postclose-tip:02de42196ebe draft
53 postclose-tip:02de42196ebe draft
53 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
54 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
54 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
55 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
55 (run 'hg heads' to see heads, 'hg merge' to merge)
56 (run 'hg heads' to see heads, 'hg merge' to merge)
56
57
57 $ cd ..
58 $ cd ..
58
59
59 Real world exchange
60 Real world exchange
60 =====================
61 =====================
61
62
62 Add more obsolescence information
63 Add more obsolescence information
63
64
64 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
65 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
65 pre-close-tip:02de42196ebe draft
66 pre-close-tip:02de42196ebe draft
66 postclose-tip:02de42196ebe draft
67 postclose-tip:02de42196ebe draft
67 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
68 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
68 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
69 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
69 pre-close-tip:02de42196ebe draft
70 pre-close-tip:02de42196ebe draft
70 postclose-tip:02de42196ebe draft
71 postclose-tip:02de42196ebe draft
71 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
72 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
72
73
73 clone --pull
74 clone --pull
74
75
75 $ hg -R main phase --public cd010b8cd998
76 $ hg -R main phase --public cd010b8cd998
76 pre-close-tip:02de42196ebe draft
77 pre-close-tip:02de42196ebe draft
77 postclose-tip:02de42196ebe draft
78 postclose-tip:02de42196ebe draft
78 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
79 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
79 $ hg clone main other --pull --rev 9520eea781bc
80 $ hg clone main other --pull --rev 9520eea781bc
80 adding changesets
81 adding changesets
81 adding manifests
82 adding manifests
82 adding file changes
83 adding file changes
83 added 2 changesets with 2 changes to 2 files
84 added 2 changesets with 2 changes to 2 files
84 1 new obsolescence markers
85 1 new obsolescence markers
85 pre-close-tip:9520eea781bc draft
86 pre-close-tip:9520eea781bc draft
86 postclose-tip:9520eea781bc draft
87 postclose-tip:9520eea781bc draft
87 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
88 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
88 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
89 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
89 updating to branch default
90 updating to branch default
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 $ hg -R other log -G
92 $ hg -R other log -G
92 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
93 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
93 |
94 |
94 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
95 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
95
96
96 $ hg -R other debugobsolete
97 $ hg -R other debugobsolete
97 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
98 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
98
99
99 pull
100 pull
100
101
101 $ hg -R main phase --public 9520eea781bc
102 $ hg -R main phase --public 9520eea781bc
102 pre-close-tip:02de42196ebe draft
103 pre-close-tip:02de42196ebe draft
103 postclose-tip:02de42196ebe draft
104 postclose-tip:02de42196ebe draft
104 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
105 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
105 $ hg -R other pull -r 24b6387c8c8c
106 $ hg -R other pull -r 24b6387c8c8c
106 pulling from $TESTTMP/main (glob)
107 pulling from $TESTTMP/main (glob)
107 searching for changes
108 searching for changes
108 adding changesets
109 adding changesets
109 adding manifests
110 adding manifests
110 adding file changes
111 adding file changes
111 added 1 changesets with 1 changes to 1 files (+1 heads)
112 added 1 changesets with 1 changes to 1 files (+1 heads)
112 1 new obsolescence markers
113 1 new obsolescence markers
113 pre-close-tip:24b6387c8c8c draft
114 pre-close-tip:24b6387c8c8c draft
114 postclose-tip:24b6387c8c8c draft
115 postclose-tip:24b6387c8c8c draft
115 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
116 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
116 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
117 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
117 (run 'hg heads' to see heads, 'hg merge' to merge)
118 (run 'hg heads' to see heads, 'hg merge' to merge)
118 $ hg -R other log -G
119 $ hg -R other log -G
119 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
120 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
120 |
121 |
121 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
122 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
122 |/
123 |/
123 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
124 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
124
125
125 $ hg -R other debugobsolete
126 $ hg -R other debugobsolete
126 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128
129
129 pull empty (with phase movement)
130 pull empty (with phase movement)
130
131
131 $ hg -R main phase --public 24b6387c8c8c
132 $ hg -R main phase --public 24b6387c8c8c
132 pre-close-tip:02de42196ebe draft
133 pre-close-tip:02de42196ebe draft
133 postclose-tip:02de42196ebe draft
134 postclose-tip:02de42196ebe draft
134 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
135 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
135 $ hg -R other pull -r 24b6387c8c8c
136 $ hg -R other pull -r 24b6387c8c8c
136 pulling from $TESTTMP/main (glob)
137 pulling from $TESTTMP/main (glob)
137 no changes found
138 no changes found
138 pre-close-tip:24b6387c8c8c public
139 pre-close-tip:24b6387c8c8c public
139 postclose-tip:24b6387c8c8c public
140 postclose-tip:24b6387c8c8c public
140 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
141 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
141 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
142 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
142 $ hg -R other log -G
143 $ hg -R other log -G
143 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
144 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
144 |
145 |
145 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
146 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
146 |/
147 |/
147 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
148 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
148
149
149 $ hg -R other debugobsolete
150 $ hg -R other debugobsolete
150 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
152 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
152
153
153 pull empty
154 pull empty
154
155
155 $ hg -R other pull -r 24b6387c8c8c
156 $ hg -R other pull -r 24b6387c8c8c
156 pulling from $TESTTMP/main (glob)
157 pulling from $TESTTMP/main (glob)
157 no changes found
158 no changes found
158 pre-close-tip:24b6387c8c8c public
159 pre-close-tip:24b6387c8c8c public
159 postclose-tip:24b6387c8c8c public
160 postclose-tip:24b6387c8c8c public
160 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
161 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
161 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
162 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
162 $ hg -R other log -G
163 $ hg -R other log -G
163 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
164 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
164 |
165 |
165 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
166 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
166 |/
167 |/
167 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
168 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
168
169
169 $ hg -R other debugobsolete
170 $ hg -R other debugobsolete
170 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
172 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
172
173
173 add extra data to test their exchange during push
174 add extra data to test their exchange during push
174
175
175 $ hg -R main bookmark --rev eea13746799a book_eea1
176 $ hg -R main bookmark --rev eea13746799a book_eea1
176 pre-close-tip:02de42196ebe draft
177 pre-close-tip:02de42196ebe draft
177 postclose-tip:02de42196ebe draft
178 postclose-tip:02de42196ebe draft
178 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
179 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
179 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
180 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
180 pre-close-tip:02de42196ebe draft
181 pre-close-tip:02de42196ebe draft
181 postclose-tip:02de42196ebe draft
182 postclose-tip:02de42196ebe draft
182 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
183 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
183 $ hg -R main bookmark --rev 02de42196ebe book_02de
184 $ hg -R main bookmark --rev 02de42196ebe book_02de
184 pre-close-tip:02de42196ebe draft book_02de
185 pre-close-tip:02de42196ebe draft book_02de
185 postclose-tip:02de42196ebe draft book_02de
186 postclose-tip:02de42196ebe draft book_02de
186 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
187 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
187 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
188 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
188 pre-close-tip:02de42196ebe draft book_02de
189 pre-close-tip:02de42196ebe draft book_02de
189 postclose-tip:02de42196ebe draft book_02de
190 postclose-tip:02de42196ebe draft book_02de
190 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
191 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
191 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
192 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
192 pre-close-tip:02de42196ebe draft book_02de
193 pre-close-tip:02de42196ebe draft book_02de
193 postclose-tip:02de42196ebe draft book_02de
194 postclose-tip:02de42196ebe draft book_02de
194 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
195 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
195 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
196 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
196 pre-close-tip:02de42196ebe draft book_02de
197 pre-close-tip:02de42196ebe draft book_02de
197 postclose-tip:02de42196ebe draft book_02de
198 postclose-tip:02de42196ebe draft book_02de
198 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
199 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
199 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
200 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
200 pre-close-tip:02de42196ebe draft book_02de
201 pre-close-tip:02de42196ebe draft book_02de
201 postclose-tip:02de42196ebe draft book_02de
202 postclose-tip:02de42196ebe draft book_02de
202 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
203 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
203 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
204 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
204 pre-close-tip:02de42196ebe draft book_02de
205 pre-close-tip:02de42196ebe draft book_02de
205 postclose-tip:02de42196ebe draft book_02de
206 postclose-tip:02de42196ebe draft book_02de
206 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
207 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
207 $ hg -R main bookmark --rev 32af7686d403 book_32af
208 $ hg -R main bookmark --rev 32af7686d403 book_32af
208 pre-close-tip:02de42196ebe draft book_02de
209 pre-close-tip:02de42196ebe draft book_02de
209 postclose-tip:02de42196ebe draft book_02de
210 postclose-tip:02de42196ebe draft book_02de
210 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
211 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
211 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
212 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
212 pre-close-tip:02de42196ebe draft book_02de
213 pre-close-tip:02de42196ebe draft book_02de
213 postclose-tip:02de42196ebe draft book_02de
214 postclose-tip:02de42196ebe draft book_02de
214 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
215 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
215
216
216 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
217 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
217 pre-close-tip:24b6387c8c8c public
218 pre-close-tip:24b6387c8c8c public
218 postclose-tip:24b6387c8c8c public
219 postclose-tip:24b6387c8c8c public
219 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
220 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
220 $ hg -R other bookmark --rev cd010b8cd998 book_02de
221 $ hg -R other bookmark --rev cd010b8cd998 book_02de
221 pre-close-tip:24b6387c8c8c public
222 pre-close-tip:24b6387c8c8c public
222 postclose-tip:24b6387c8c8c public
223 postclose-tip:24b6387c8c8c public
223 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
224 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
224 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
225 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
225 pre-close-tip:24b6387c8c8c public
226 pre-close-tip:24b6387c8c8c public
226 postclose-tip:24b6387c8c8c public
227 postclose-tip:24b6387c8c8c public
227 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
228 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
228 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
229 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
229 pre-close-tip:24b6387c8c8c public
230 pre-close-tip:24b6387c8c8c public
230 postclose-tip:24b6387c8c8c public
231 postclose-tip:24b6387c8c8c public
231 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
232 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
232 $ hg -R other bookmark --rev cd010b8cd998 book_32af
233 $ hg -R other bookmark --rev cd010b8cd998 book_32af
233 pre-close-tip:24b6387c8c8c public
234 pre-close-tip:24b6387c8c8c public
234 postclose-tip:24b6387c8c8c public
235 postclose-tip:24b6387c8c8c public
235 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
236 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
236
237
237 $ hg -R main phase --public eea13746799a
238 $ hg -R main phase --public eea13746799a
238 pre-close-tip:02de42196ebe draft book_02de
239 pre-close-tip:02de42196ebe draft book_02de
239 postclose-tip:02de42196ebe draft book_02de
240 postclose-tip:02de42196ebe draft book_02de
240 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
241 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
241
242
242 push
243 push
243 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
244 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
244 pushing to other
245 pushing to other
245 searching for changes
246 searching for changes
246 remote: adding changesets
247 remote: adding changesets
247 remote: adding manifests
248 remote: adding manifests
248 remote: adding file changes
249 remote: adding file changes
249 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
250 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
250 remote: 1 new obsolescence markers
251 remote: 1 new obsolescence markers
251 remote: pre-close-tip:eea13746799a public book_eea1
252 remote: pre-close-tip:eea13746799a public book_eea1
252 remote: pushkey: lock state after "phases"
253 remote: pushkey: lock state after "phases"
253 remote: lock: free
254 remote: lock: free
254 remote: wlock: free
255 remote: wlock: free
255 remote: pushkey: lock state after "bookmarks"
256 remote: pushkey: lock state after "bookmarks"
256 remote: lock: free
257 remote: lock: free
257 remote: wlock: free
258 remote: wlock: free
258 remote: postclose-tip:eea13746799a public book_eea1
259 remote: postclose-tip:eea13746799a public book_eea1
259 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
260 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
260 updating bookmark book_eea1
261 updating bookmark book_eea1
261 pre-close-tip:02de42196ebe draft book_02de
262 pre-close-tip:02de42196ebe draft book_02de
262 postclose-tip:02de42196ebe draft book_02de
263 postclose-tip:02de42196ebe draft book_02de
263 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
264 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
264 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
265 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
265 $ hg -R other log -G
266 $ hg -R other log -G
266 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
267 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
267 |\
268 |\
268 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
269 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
269 | |
270 | |
270 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
271 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
271 |/
272 |/
272 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
273 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
273
274
274 $ hg -R other debugobsolete
275 $ hg -R other debugobsolete
275 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
276 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
276 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
277 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
277 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
278 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
278
279
279 pull over ssh
280 pull over ssh
280
281
281 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
282 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
282 pulling from ssh://user@dummy/main
283 pulling from ssh://user@dummy/main
283 searching for changes
284 searching for changes
284 adding changesets
285 adding changesets
285 adding manifests
286 adding manifests
286 adding file changes
287 adding file changes
287 added 1 changesets with 1 changes to 1 files (+1 heads)
288 added 1 changesets with 1 changes to 1 files (+1 heads)
288 1 new obsolescence markers
289 1 new obsolescence markers
289 updating bookmark book_02de
290 updating bookmark book_02de
290 pre-close-tip:02de42196ebe draft book_02de
291 pre-close-tip:02de42196ebe draft book_02de
291 postclose-tip:02de42196ebe draft book_02de
292 postclose-tip:02de42196ebe draft book_02de
292 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
293 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
293 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
294 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
294 (run 'hg heads' to see heads, 'hg merge' to merge)
295 (run 'hg heads' to see heads, 'hg merge' to merge)
295 $ hg -R other debugobsolete
296 $ hg -R other debugobsolete
296 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300
301
301 pull over http
302 pull over http
302
303
303 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
304 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
304 $ cat main.pid >> $DAEMON_PIDS
305 $ cat main.pid >> $DAEMON_PIDS
305
306
306 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
307 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
307 pulling from http://localhost:$HGPORT/
308 pulling from http://localhost:$HGPORT/
308 searching for changes
309 searching for changes
309 adding changesets
310 adding changesets
310 adding manifests
311 adding manifests
311 adding file changes
312 adding file changes
312 added 1 changesets with 1 changes to 1 files (+1 heads)
313 added 1 changesets with 1 changes to 1 files (+1 heads)
313 1 new obsolescence markers
314 1 new obsolescence markers
314 updating bookmark book_42cc
315 updating bookmark book_42cc
315 pre-close-tip:42ccdea3bb16 draft book_42cc
316 pre-close-tip:42ccdea3bb16 draft book_42cc
316 postclose-tip:42ccdea3bb16 draft book_42cc
317 postclose-tip:42ccdea3bb16 draft book_42cc
317 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
318 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
318 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
319 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
319 (run 'hg heads .' to see heads, 'hg merge' to merge)
320 (run 'hg heads .' to see heads, 'hg merge' to merge)
320 $ cat main-error.log
321 $ cat main-error.log
321 $ hg -R other debugobsolete
322 $ hg -R other debugobsolete
322 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
323 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
323 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
324 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
324 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
325 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
325 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
326 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
326 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
327 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
327
328
328 push over ssh
329 push over ssh
329
330
330 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
331 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
331 pushing to ssh://user@dummy/other
332 pushing to ssh://user@dummy/other
332 searching for changes
333 searching for changes
333 remote: adding changesets
334 remote: adding changesets
334 remote: adding manifests
335 remote: adding manifests
335 remote: adding file changes
336 remote: adding file changes
336 remote: added 1 changesets with 1 changes to 1 files
337 remote: added 1 changesets with 1 changes to 1 files
337 remote: 1 new obsolescence markers
338 remote: 1 new obsolescence markers
338 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
339 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
339 remote: pushkey: lock state after "bookmarks"
340 remote: pushkey: lock state after "bookmarks"
340 remote: lock: free
341 remote: lock: free
341 remote: wlock: free
342 remote: wlock: free
342 remote: postclose-tip:5fddd98957c8 draft book_5fdd
343 remote: postclose-tip:5fddd98957c8 draft book_5fdd
343 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
344 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
344 updating bookmark book_5fdd
345 updating bookmark book_5fdd
345 pre-close-tip:02de42196ebe draft book_02de
346 pre-close-tip:02de42196ebe draft book_02de
346 postclose-tip:02de42196ebe draft book_02de
347 postclose-tip:02de42196ebe draft book_02de
347 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
348 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
348 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
349 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
349 $ hg -R other log -G
350 $ hg -R other log -G
350 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
351 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
351 |
352 |
352 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
353 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
353 |
354 |
354 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
355 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
355 | |
356 | |
356 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
357 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
357 | |/|
358 | |/|
358 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
359 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
359 |/ /
360 |/ /
360 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
361 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
361 |/
362 |/
362 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
363 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
363
364
364 $ hg -R other debugobsolete
365 $ hg -R other debugobsolete
365 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
366 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
366 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
367 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
367 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
368 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
368 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
369 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
369 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
370 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
370 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
371 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
371
372
372 push over http
373 push over http
373
374
374 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
375 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
375 $ cat other.pid >> $DAEMON_PIDS
376 $ cat other.pid >> $DAEMON_PIDS
376
377
377 $ hg -R main phase --public 32af7686d403
378 $ hg -R main phase --public 32af7686d403
378 pre-close-tip:02de42196ebe draft book_02de
379 pre-close-tip:02de42196ebe draft book_02de
379 postclose-tip:02de42196ebe draft book_02de
380 postclose-tip:02de42196ebe draft book_02de
380 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
381 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
381 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
382 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
382 pushing to http://localhost:$HGPORT2/
383 pushing to http://localhost:$HGPORT2/
383 searching for changes
384 searching for changes
384 remote: adding changesets
385 remote: adding changesets
385 remote: adding manifests
386 remote: adding manifests
386 remote: adding file changes
387 remote: adding file changes
387 remote: added 1 changesets with 1 changes to 1 files
388 remote: added 1 changesets with 1 changes to 1 files
388 remote: 1 new obsolescence markers
389 remote: 1 new obsolescence markers
389 remote: pre-close-tip:32af7686d403 public book_32af
390 remote: pre-close-tip:32af7686d403 public book_32af
390 remote: pushkey: lock state after "phases"
391 remote: pushkey: lock state after "phases"
391 remote: lock: free
392 remote: lock: free
392 remote: wlock: free
393 remote: wlock: free
393 remote: pushkey: lock state after "bookmarks"
394 remote: pushkey: lock state after "bookmarks"
394 remote: lock: free
395 remote: lock: free
395 remote: wlock: free
396 remote: wlock: free
396 remote: postclose-tip:32af7686d403 public book_32af
397 remote: postclose-tip:32af7686d403 public book_32af
397 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
398 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
398 updating bookmark book_32af
399 updating bookmark book_32af
399 pre-close-tip:02de42196ebe draft book_02de
400 pre-close-tip:02de42196ebe draft book_02de
400 postclose-tip:02de42196ebe draft book_02de
401 postclose-tip:02de42196ebe draft book_02de
401 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
402 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
402 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
403 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
403 $ cat other-error.log
404 $ cat other-error.log
404
405
405 Check final content.
406 Check final content.
406
407
407 $ hg -R other log -G
408 $ hg -R other log -G
408 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
409 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
409 |
410 |
410 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
411 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
411 |
412 |
412 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
413 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
413 |
414 |
414 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
415 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
415 | |
416 | |
416 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
417 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
417 | |/|
418 | |/|
418 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
419 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
419 |/ /
420 |/ /
420 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
421 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
421 |/
422 |/
422 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
423 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
423
424
424 $ hg -R other debugobsolete
425 $ hg -R other debugobsolete
425 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
426 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
426 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
427 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
427 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
428 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
428 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
429 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
429 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
430 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
430 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
432 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
432
433
433 (check that no 'pending' files remain)
434 (check that no 'pending' files remain)
434
435
435 $ ls -1 other/.hg/bookmarks*
436 $ ls -1 other/.hg/bookmarks*
436 other/.hg/bookmarks
437 other/.hg/bookmarks
437 $ ls -1 other/.hg/store/phaseroots*
438 $ ls -1 other/.hg/store/phaseroots*
438 other/.hg/store/phaseroots
439 other/.hg/store/phaseroots
439 $ ls -1 other/.hg/store/00changelog.i*
440 $ ls -1 other/.hg/store/00changelog.i*
440 other/.hg/store/00changelog.i
441 other/.hg/store/00changelog.i
441
442
442 Error Handling
443 Error Handling
443 ==============
444 ==============
444
445
445 Check that errors are properly returned to the client during push.
446 Check that errors are properly returned to the client during push.
446
447
447 Setting up
448 Setting up
448
449
449 $ cat > failpush.py << EOF
450 $ cat > failpush.py << EOF
450 > """A small extension that makes push fails when using bundle2
451 > """A small extension that makes push fails when using bundle2
451 >
452 >
452 > used to test error handling in bundle2
453 > used to test error handling in bundle2
453 > """
454 > """
454 >
455 >
455 > from mercurial import util
456 > from mercurial import util
456 > from mercurial import bundle2
457 > from mercurial import bundle2
457 > from mercurial import exchange
458 > from mercurial import exchange
458 > from mercurial import extensions
459 > from mercurial import extensions
459 >
460 >
460 > def _pushbundle2failpart(pushop, bundler):
461 > def _pushbundle2failpart(pushop, bundler):
461 > reason = pushop.ui.config('failpush', 'reason', None)
462 > reason = pushop.ui.config('failpush', 'reason', None)
462 > part = None
463 > part = None
463 > if reason == 'abort':
464 > if reason == 'abort':
464 > bundler.newpart('test:abort')
465 > bundler.newpart('test:abort')
465 > if reason == 'unknown':
466 > if reason == 'unknown':
466 > bundler.newpart('test:unknown')
467 > bundler.newpart('test:unknown')
467 > if reason == 'race':
468 > if reason == 'race':
468 > # 20 Bytes of crap
469 > # 20 Bytes of crap
469 > bundler.newpart('check:heads', data='01234567890123456789')
470 > bundler.newpart('check:heads', data='01234567890123456789')
470 >
471 >
471 > @bundle2.parthandler("test:abort")
472 > @bundle2.parthandler("test:abort")
472 > def handleabort(op, part):
473 > def handleabort(op, part):
473 > raise util.Abort('Abandon ship!', hint="don't panic")
474 > raise util.Abort('Abandon ship!', hint="don't panic")
474 >
475 >
475 > def uisetup(ui):
476 > def uisetup(ui):
476 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
477 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
477 > exchange.b2partsgenorder.insert(0, 'failpart')
478 > exchange.b2partsgenorder.insert(0, 'failpart')
478 >
479 >
479 > EOF
480 > EOF
480
481
481 $ cd main
482 $ cd main
482 $ hg up tip
483 $ hg up tip
483 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
484 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
484 $ echo 'I' > I
485 $ echo 'I' > I
485 $ hg add I
486 $ hg add I
486 $ hg ci -m 'I'
487 $ hg ci -m 'I'
487 pre-close-tip:e7ec4e813ba6 draft
488 pre-close-tip:e7ec4e813ba6 draft
488 postclose-tip:e7ec4e813ba6 draft
489 postclose-tip:e7ec4e813ba6 draft
489 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
490 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
490 $ hg id
491 $ hg id
491 e7ec4e813ba6 tip
492 e7ec4e813ba6 tip
492 $ cd ..
493 $ cd ..
493
494
494 $ cat << EOF >> $HGRCPATH
495 $ cat << EOF >> $HGRCPATH
495 > [extensions]
496 > [extensions]
496 > failpush=$TESTTMP/failpush.py
497 > failpush=$TESTTMP/failpush.py
497 > EOF
498 > EOF
498
499
499 $ killdaemons.py
500 $ killdaemons.py
500 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
501 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
501 $ cat other.pid >> $DAEMON_PIDS
502 $ cat other.pid >> $DAEMON_PIDS
502
503
503 Doing the actual push: Abort error
504 Doing the actual push: Abort error
504
505
505 $ cat << EOF >> $HGRCPATH
506 $ cat << EOF >> $HGRCPATH
506 > [failpush]
507 > [failpush]
507 > reason = abort
508 > reason = abort
508 > EOF
509 > EOF
509
510
510 $ hg -R main push other -r e7ec4e813ba6
511 $ hg -R main push other -r e7ec4e813ba6
511 pushing to other
512 pushing to other
512 searching for changes
513 searching for changes
513 abort: Abandon ship!
514 abort: Abandon ship!
514 (don't panic)
515 (don't panic)
515 [255]
516 [255]
516
517
517 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
518 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
518 pushing to ssh://user@dummy/other
519 pushing to ssh://user@dummy/other
519 searching for changes
520 searching for changes
520 abort: Abandon ship!
521 abort: Abandon ship!
521 (don't panic)
522 (don't panic)
522 [255]
523 [255]
523
524
524 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
525 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
525 pushing to http://localhost:$HGPORT2/
526 pushing to http://localhost:$HGPORT2/
526 searching for changes
527 searching for changes
527 abort: Abandon ship!
528 abort: Abandon ship!
528 (don't panic)
529 (don't panic)
529 [255]
530 [255]
530
531
531
532
532 Doing the actual push: unknown mandatory parts
533 Doing the actual push: unknown mandatory parts
533
534
534 $ cat << EOF >> $HGRCPATH
535 $ cat << EOF >> $HGRCPATH
535 > [failpush]
536 > [failpush]
536 > reason = unknown
537 > reason = unknown
537 > EOF
538 > EOF
538
539
539 $ hg -R main push other -r e7ec4e813ba6
540 $ hg -R main push other -r e7ec4e813ba6
540 pushing to other
541 pushing to other
541 searching for changes
542 searching for changes
542 abort: missing support for test:unknown
543 abort: missing support for test:unknown
543 [255]
544 [255]
544
545
545 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
546 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
546 pushing to ssh://user@dummy/other
547 pushing to ssh://user@dummy/other
547 searching for changes
548 searching for changes
548 abort: missing support for test:unknown
549 abort: missing support for test:unknown
549 [255]
550 [255]
550
551
551 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
552 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
552 pushing to http://localhost:$HGPORT2/
553 pushing to http://localhost:$HGPORT2/
553 searching for changes
554 searching for changes
554 abort: missing support for test:unknown
555 abort: missing support for test:unknown
555 [255]
556 [255]
556
557
557 Doing the actual push: race
558 Doing the actual push: race
558
559
559 $ cat << EOF >> $HGRCPATH
560 $ cat << EOF >> $HGRCPATH
560 > [failpush]
561 > [failpush]
561 > reason = race
562 > reason = race
562 > EOF
563 > EOF
563
564
564 $ hg -R main push other -r e7ec4e813ba6
565 $ hg -R main push other -r e7ec4e813ba6
565 pushing to other
566 pushing to other
566 searching for changes
567 searching for changes
567 abort: push failed:
568 abort: push failed:
568 'repository changed while pushing - please try again'
569 'repository changed while pushing - please try again'
569 [255]
570 [255]
570
571
571 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
572 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
572 pushing to ssh://user@dummy/other
573 pushing to ssh://user@dummy/other
573 searching for changes
574 searching for changes
574 abort: push failed:
575 abort: push failed:
575 'repository changed while pushing - please try again'
576 'repository changed while pushing - please try again'
576 [255]
577 [255]
577
578
578 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
579 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
579 pushing to http://localhost:$HGPORT2/
580 pushing to http://localhost:$HGPORT2/
580 searching for changes
581 searching for changes
581 abort: push failed:
582 abort: push failed:
582 'repository changed while pushing - please try again'
583 'repository changed while pushing - please try again'
583 [255]
584 [255]
584
585
585 Doing the actual push: hook abort
586 Doing the actual push: hook abort
586
587
587 $ cat << EOF >> $HGRCPATH
588 $ cat << EOF >> $HGRCPATH
588 > [failpush]
589 > [failpush]
589 > reason =
590 > reason =
590 > [hooks]
591 > [hooks]
591 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
592 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
592 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
593 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
593 > EOF
594 > EOF
594
595
595 $ killdaemons.py
596 $ killdaemons.py
596 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
597 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
597 $ cat other.pid >> $DAEMON_PIDS
598 $ cat other.pid >> $DAEMON_PIDS
598
599
599 $ hg -R main push other -r e7ec4e813ba6
600 $ hg -R main push other -r e7ec4e813ba6
600 pushing to other
601 pushing to other
601 searching for changes
602 searching for changes
602 remote: adding changesets
603 remote: adding changesets
603 remote: adding manifests
604 remote: adding manifests
604 remote: adding file changes
605 remote: adding file changes
605 remote: added 1 changesets with 1 changes to 1 files
606 remote: added 1 changesets with 1 changes to 1 files
606 remote: pre-close-tip:e7ec4e813ba6 draft
607 remote: pre-close-tip:e7ec4e813ba6 draft
607 remote: You shall not pass!
608 remote: You shall not pass!
608 remote: transaction abort!
609 remote: transaction abort!
609 remote: Cleaning up the mess...
610 remote: Cleaning up the mess...
610 remote: rollback completed
611 remote: rollback completed
611 abort: pretxnclose.failpush hook exited with status 1
612 abort: pretxnclose.failpush hook exited with status 1
612 [255]
613 [255]
613
614
614 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
615 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
615 pushing to ssh://user@dummy/other
616 pushing to ssh://user@dummy/other
616 searching for changes
617 searching for changes
617 remote: adding changesets
618 remote: adding changesets
618 remote: adding manifests
619 remote: adding manifests
619 remote: adding file changes
620 remote: adding file changes
620 remote: added 1 changesets with 1 changes to 1 files
621 remote: added 1 changesets with 1 changes to 1 files
621 remote: pre-close-tip:e7ec4e813ba6 draft
622 remote: pre-close-tip:e7ec4e813ba6 draft
622 remote: You shall not pass!
623 remote: You shall not pass!
623 remote: transaction abort!
624 remote: transaction abort!
624 remote: Cleaning up the mess...
625 remote: Cleaning up the mess...
625 remote: rollback completed
626 remote: rollback completed
626 abort: pretxnclose.failpush hook exited with status 1
627 abort: pretxnclose.failpush hook exited with status 1
627 [255]
628 [255]
628
629
629 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
630 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
630 pushing to http://localhost:$HGPORT2/
631 pushing to http://localhost:$HGPORT2/
631 searching for changes
632 searching for changes
632 remote: adding changesets
633 remote: adding changesets
633 remote: adding manifests
634 remote: adding manifests
634 remote: adding file changes
635 remote: adding file changes
635 remote: added 1 changesets with 1 changes to 1 files
636 remote: added 1 changesets with 1 changes to 1 files
636 remote: pre-close-tip:e7ec4e813ba6 draft
637 remote: pre-close-tip:e7ec4e813ba6 draft
637 remote: You shall not pass!
638 remote: You shall not pass!
638 remote: transaction abort!
639 remote: transaction abort!
639 remote: Cleaning up the mess...
640 remote: Cleaning up the mess...
640 remote: rollback completed
641 remote: rollback completed
641 abort: pretxnclose.failpush hook exited with status 1
642 abort: pretxnclose.failpush hook exited with status 1
642 [255]
643 [255]
643
644
644 (check that no 'pending' files remain)
645 (check that no 'pending' files remain)
645
646
646 $ ls -1 other/.hg/bookmarks*
647 $ ls -1 other/.hg/bookmarks*
647 other/.hg/bookmarks
648 other/.hg/bookmarks
648 $ ls -1 other/.hg/store/phaseroots*
649 $ ls -1 other/.hg/store/phaseroots*
649 other/.hg/store/phaseroots
650 other/.hg/store/phaseroots
650 $ ls -1 other/.hg/store/00changelog.i*
651 $ ls -1 other/.hg/store/00changelog.i*
651 other/.hg/store/00changelog.i
652 other/.hg/store/00changelog.i
652
653
653 Check error from hook during the unbundling process itself
654 Check error from hook during the unbundling process itself
654
655
655 $ cat << EOF >> $HGRCPATH
656 $ cat << EOF >> $HGRCPATH
656 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
657 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
657 > EOF
658 > EOF
658 $ killdaemons.py # reload http config
659 $ killdaemons.py # reload http config
659 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
660 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
660 $ cat other.pid >> $DAEMON_PIDS
661 $ cat other.pid >> $DAEMON_PIDS
661
662
662 $ hg -R main push other -r e7ec4e813ba6
663 $ hg -R main push other -r e7ec4e813ba6
663 pushing to other
664 pushing to other
664 searching for changes
665 searching for changes
665 remote: adding changesets
666 remote: adding changesets
666 remote: adding manifests
667 remote: adding manifests
667 remote: adding file changes
668 remote: adding file changes
668 remote: added 1 changesets with 1 changes to 1 files
669 remote: added 1 changesets with 1 changes to 1 files
669 remote: Fail early!
670 remote: Fail early!
670 remote: transaction abort!
671 remote: transaction abort!
671 remote: Cleaning up the mess...
672 remote: Cleaning up the mess...
672 remote: rollback completed
673 remote: rollback completed
673 abort: pretxnchangegroup hook exited with status 1
674 abort: pretxnchangegroup hook exited with status 1
674 [255]
675 [255]
675 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
676 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
676 pushing to ssh://user@dummy/other
677 pushing to ssh://user@dummy/other
677 searching for changes
678 searching for changes
678 remote: adding changesets
679 remote: adding changesets
679 remote: adding manifests
680 remote: adding manifests
680 remote: adding file changes
681 remote: adding file changes
681 remote: added 1 changesets with 1 changes to 1 files
682 remote: added 1 changesets with 1 changes to 1 files
682 remote: Fail early!
683 remote: Fail early!
683 remote: transaction abort!
684 remote: transaction abort!
684 remote: Cleaning up the mess...
685 remote: Cleaning up the mess...
685 remote: rollback completed
686 remote: rollback completed
686 abort: pretxnchangegroup hook exited with status 1
687 abort: pretxnchangegroup hook exited with status 1
687 [255]
688 [255]
688 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
689 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
689 pushing to http://localhost:$HGPORT2/
690 pushing to http://localhost:$HGPORT2/
690 searching for changes
691 searching for changes
691 remote: adding changesets
692 remote: adding changesets
692 remote: adding manifests
693 remote: adding manifests
693 remote: adding file changes
694 remote: adding file changes
694 remote: added 1 changesets with 1 changes to 1 files
695 remote: added 1 changesets with 1 changes to 1 files
695 remote: Fail early!
696 remote: Fail early!
696 remote: transaction abort!
697 remote: transaction abort!
697 remote: Cleaning up the mess...
698 remote: Cleaning up the mess...
698 remote: rollback completed
699 remote: rollback completed
699 abort: pretxnchangegroup hook exited with status 1
700 abort: pretxnchangegroup hook exited with status 1
700 [255]
701 [255]
701
702
702 Check output capture control.
703 Check output capture control.
703
704
704 (should be still forced for http, disabled for local and ssh)
705 (should be still forced for http, disabled for local and ssh)
705
706
706 $ cat >> $HGRCPATH << EOF
707 $ cat >> $HGRCPATH << EOF
707 > [experimental]
708 > [experimental]
708 > bundle2-output-capture=False
709 > bundle2-output-capture=False
709 > EOF
710 > EOF
710
711
711 $ hg -R main push other -r e7ec4e813ba6
712 $ hg -R main push other -r e7ec4e813ba6
712 pushing to other
713 pushing to other
713 searching for changes
714 searching for changes
714 adding changesets
715 adding changesets
715 adding manifests
716 adding manifests
716 adding file changes
717 adding file changes
717 added 1 changesets with 1 changes to 1 files
718 added 1 changesets with 1 changes to 1 files
718 Fail early!
719 Fail early!
719 transaction abort!
720 transaction abort!
720 Cleaning up the mess...
721 Cleaning up the mess...
721 rollback completed
722 rollback completed
722 abort: pretxnchangegroup hook exited with status 1
723 abort: pretxnchangegroup hook exited with status 1
723 [255]
724 [255]
724 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
725 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
725 pushing to ssh://user@dummy/other
726 pushing to ssh://user@dummy/other
726 searching for changes
727 searching for changes
727 remote: adding changesets
728 remote: adding changesets
728 remote: adding manifests
729 remote: adding manifests
729 remote: adding file changes
730 remote: adding file changes
730 remote: added 1 changesets with 1 changes to 1 files
731 remote: added 1 changesets with 1 changes to 1 files
731 remote: Fail early!
732 remote: Fail early!
732 remote: transaction abort!
733 remote: transaction abort!
733 remote: Cleaning up the mess...
734 remote: Cleaning up the mess...
734 remote: rollback completed
735 remote: rollback completed
735 abort: pretxnchangegroup hook exited with status 1
736 abort: pretxnchangegroup hook exited with status 1
736 [255]
737 [255]
737 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
738 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
738 pushing to http://localhost:$HGPORT2/
739 pushing to http://localhost:$HGPORT2/
739 searching for changes
740 searching for changes
740 remote: adding changesets
741 remote: adding changesets
741 remote: adding manifests
742 remote: adding manifests
742 remote: adding file changes
743 remote: adding file changes
743 remote: added 1 changesets with 1 changes to 1 files
744 remote: added 1 changesets with 1 changes to 1 files
744 remote: Fail early!
745 remote: Fail early!
745 remote: transaction abort!
746 remote: transaction abort!
746 remote: Cleaning up the mess...
747 remote: Cleaning up the mess...
747 remote: rollback completed
748 remote: rollback completed
748 abort: pretxnchangegroup hook exited with status 1
749 abort: pretxnchangegroup hook exited with status 1
749 [255]
750 [255]
750
751
751 Check abort from mandatory pushkey
752 Check abort from mandatory pushkey
752
753
753 $ cat > mandatorypart.py << EOF
754 $ cat > mandatorypart.py << EOF
754 > from mercurial import exchange
755 > from mercurial import exchange
755 > from mercurial import pushkey
756 > from mercurial import pushkey
756 > from mercurial import node
757 > from mercurial import node
757 > from mercurial import error
758 > from mercurial import error
758 > @exchange.b2partsgenerator('failingpuskey')
759 > @exchange.b2partsgenerator('failingpuskey')
759 > def addfailingpushey(pushop, bundler):
760 > def addfailingpushey(pushop, bundler):
760 > enc = pushkey.encode
761 > enc = pushkey.encode
761 > part = bundler.newpart('pushkey')
762 > part = bundler.newpart('pushkey')
762 > part.addparam('namespace', enc('phases'))
763 > part.addparam('namespace', enc('phases'))
763 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
764 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
764 > part.addparam('old', enc(str(0))) # successful update
765 > part.addparam('old', enc(str(0))) # successful update
765 > part.addparam('new', enc(str(0)))
766 > part.addparam('new', enc(str(0)))
766 > def fail(pushop, exc):
767 > def fail(pushop, exc):
767 > raise error.Abort('Correct phase push failed (because hooks)')
768 > raise error.Abort('Correct phase push failed (because hooks)')
768 > pushop.pkfailcb[part.id] = fail
769 > pushop.pkfailcb[part.id] = fail
769 > EOF
770 > EOF
770 $ cat >> $HGRCPATH << EOF
771 $ cat >> $HGRCPATH << EOF
771 > [hooks]
772 > [hooks]
772 > pretxnchangegroup=
773 > pretxnchangegroup=
773 > pretxnclose.failpush=
774 > pretxnclose.failpush=
774 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
775 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
775 > [extensions]
776 > [extensions]
776 > mandatorypart=$TESTTMP/mandatorypart.py
777 > mandatorypart=$TESTTMP/mandatorypart.py
777 > EOF
778 > EOF
778 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
779 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
779 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
780 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
780 $ cat other.pid >> $DAEMON_PIDS
781 $ cat other.pid >> $DAEMON_PIDS
781
782
782 (Failure from a hook)
783 (Failure from a hook)
783
784
784 $ hg -R main push other -r e7ec4e813ba6
785 $ hg -R main push other -r e7ec4e813ba6
785 pushing to other
786 pushing to other
786 searching for changes
787 searching for changes
787 adding changesets
788 adding changesets
788 adding manifests
789 adding manifests
789 adding file changes
790 adding file changes
790 added 1 changesets with 1 changes to 1 files
791 added 1 changesets with 1 changes to 1 files
791 do not push the key !
792 do not push the key !
792 pushkey-abort: prepushkey.failpush hook exited with status 1
793 pushkey-abort: prepushkey.failpush hook exited with status 1
793 transaction abort!
794 transaction abort!
794 Cleaning up the mess...
795 Cleaning up the mess...
795 rollback completed
796 rollback completed
796 abort: Correct phase push failed (because hooks)
797 abort: Correct phase push failed (because hooks)
797 [255]
798 [255]
798 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
799 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
799 pushing to ssh://user@dummy/other
800 pushing to ssh://user@dummy/other
800 searching for changes
801 searching for changes
801 remote: adding changesets
802 remote: adding changesets
802 remote: adding manifests
803 remote: adding manifests
803 remote: adding file changes
804 remote: adding file changes
804 remote: added 1 changesets with 1 changes to 1 files
805 remote: added 1 changesets with 1 changes to 1 files
805 remote: do not push the key !
806 remote: do not push the key !
806 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
807 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
807 remote: transaction abort!
808 remote: transaction abort!
808 remote: Cleaning up the mess...
809 remote: Cleaning up the mess...
809 remote: rollback completed
810 remote: rollback completed
810 abort: Correct phase push failed (because hooks)
811 abort: Correct phase push failed (because hooks)
811 [255]
812 [255]
812 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
813 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
813 pushing to http://localhost:$HGPORT2/
814 pushing to http://localhost:$HGPORT2/
814 searching for changes
815 searching for changes
815 remote: adding changesets
816 remote: adding changesets
816 remote: adding manifests
817 remote: adding manifests
817 remote: adding file changes
818 remote: adding file changes
818 remote: added 1 changesets with 1 changes to 1 files
819 remote: added 1 changesets with 1 changes to 1 files
819 remote: do not push the key !
820 remote: do not push the key !
820 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
821 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
821 remote: transaction abort!
822 remote: transaction abort!
822 remote: Cleaning up the mess...
823 remote: Cleaning up the mess...
823 remote: rollback completed
824 remote: rollback completed
824 abort: Correct phase push failed (because hooks)
825 abort: Correct phase push failed (because hooks)
825 [255]
826 [255]
826
827
827 (Failure from a the pushkey)
828 (Failure from a the pushkey)
828
829
829 $ cat > mandatorypart.py << EOF
830 $ cat > mandatorypart.py << EOF
830 > from mercurial import exchange
831 > from mercurial import exchange
831 > from mercurial import pushkey
832 > from mercurial import pushkey
832 > from mercurial import node
833 > from mercurial import node
833 > from mercurial import error
834 > from mercurial import error
834 > @exchange.b2partsgenerator('failingpuskey')
835 > @exchange.b2partsgenerator('failingpuskey')
835 > def addfailingpushey(pushop, bundler):
836 > def addfailingpushey(pushop, bundler):
836 > enc = pushkey.encode
837 > enc = pushkey.encode
837 > part = bundler.newpart('pushkey')
838 > part = bundler.newpart('pushkey')
838 > part.addparam('namespace', enc('phases'))
839 > part.addparam('namespace', enc('phases'))
839 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
840 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
840 > part.addparam('old', enc(str(4))) # will fail
841 > part.addparam('old', enc(str(4))) # will fail
841 > part.addparam('new', enc(str(3)))
842 > part.addparam('new', enc(str(3)))
842 > def fail(pushop, exc):
843 > def fail(pushop, exc):
843 > raise error.Abort('Clown phase push failed')
844 > raise error.Abort('Clown phase push failed')
844 > pushop.pkfailcb[part.id] = fail
845 > pushop.pkfailcb[part.id] = fail
845 > EOF
846 > EOF
846 $ cat >> $HGRCPATH << EOF
847 $ cat >> $HGRCPATH << EOF
847 > [hooks]
848 > [hooks]
848 > prepushkey.failpush =
849 > prepushkey.failpush =
849 > EOF
850 > EOF
850 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
851 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
851 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
852 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
852 $ cat other.pid >> $DAEMON_PIDS
853 $ cat other.pid >> $DAEMON_PIDS
853
854
854 $ hg -R main push other -r e7ec4e813ba6
855 $ hg -R main push other -r e7ec4e813ba6
855 pushing to other
856 pushing to other
856 searching for changes
857 searching for changes
857 adding changesets
858 adding changesets
858 adding manifests
859 adding manifests
859 adding file changes
860 adding file changes
860 added 1 changesets with 1 changes to 1 files
861 added 1 changesets with 1 changes to 1 files
861 transaction abort!
862 transaction abort!
862 Cleaning up the mess...
863 Cleaning up the mess...
863 rollback completed
864 rollback completed
864 pushkey: lock state after "phases"
865 pushkey: lock state after "phases"
865 lock: free
866 lock: free
866 wlock: free
867 wlock: free
867 abort: Clown phase push failed
868 abort: Clown phase push failed
868 [255]
869 [255]
869 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
870 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
870 pushing to ssh://user@dummy/other
871 pushing to ssh://user@dummy/other
871 searching for changes
872 searching for changes
872 remote: adding changesets
873 remote: adding changesets
873 remote: adding manifests
874 remote: adding manifests
874 remote: adding file changes
875 remote: adding file changes
875 remote: added 1 changesets with 1 changes to 1 files
876 remote: added 1 changesets with 1 changes to 1 files
876 remote: transaction abort!
877 remote: transaction abort!
877 remote: Cleaning up the mess...
878 remote: Cleaning up the mess...
878 remote: rollback completed
879 remote: rollback completed
879 remote: pushkey: lock state after "phases"
880 remote: pushkey: lock state after "phases"
880 remote: lock: free
881 remote: lock: free
881 remote: wlock: free
882 remote: wlock: free
882 abort: Clown phase push failed
883 abort: Clown phase push failed
883 [255]
884 [255]
884 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
885 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
885 pushing to http://localhost:$HGPORT2/
886 pushing to http://localhost:$HGPORT2/
886 searching for changes
887 searching for changes
887 remote: adding changesets
888 remote: adding changesets
888 remote: adding manifests
889 remote: adding manifests
889 remote: adding file changes
890 remote: adding file changes
890 remote: added 1 changesets with 1 changes to 1 files
891 remote: added 1 changesets with 1 changes to 1 files
891 remote: transaction abort!
892 remote: transaction abort!
892 remote: Cleaning up the mess...
893 remote: Cleaning up the mess...
893 remote: rollback completed
894 remote: rollback completed
894 remote: pushkey: lock state after "phases"
895 remote: pushkey: lock state after "phases"
895 remote: lock: free
896 remote: lock: free
896 remote: wlock: free
897 remote: wlock: free
897 abort: Clown phase push failed
898 abort: Clown phase push failed
898 [255]
899 [255]
899
900
901 Test lazily acquiring the lock during unbundle
902 $ cp $TESTTMP/hgrc.orig $HGRCPATH
903 $ cat >> $HGRCPATH <<EOF
904 > [ui]
905 > ssh=python "$TESTDIR/dummyssh"
906 > EOF
907
908 $ cat >> $TESTTMP/locktester.py <<EOF
909 > import os
910 > from mercurial import extensions, bundle2, util
911 > def checklock(orig, repo, *args, **kwargs):
912 > if repo.svfs.lexists("lock"):
913 > raise util.Abort("Lock should not be taken")
914 > return orig(repo, *args, **kwargs)
915 > def extsetup(ui):
916 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
917 > EOF
918
919 $ hg init lazylock
920 $ cat >> lazylock/.hg/hgrc <<EOF
921 > [extensions]
922 > locktester=$TESTTMP/locktester.py
923 > EOF
924
925 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
926 $ cd lazylockclient
927 $ touch a && hg ci -Aqm a
928 $ hg push
929 pushing to ssh://user@dummy/lazylock
930 searching for changes
931 abort: Lock should not be taken
932 [255]
933
934 $ cat >> ../lazylock/.hg/hgrc <<EOF
935 > [experimental]
936 > bundle2lazylocking=True
937 > EOF
938 $ hg push
939 pushing to ssh://user@dummy/lazylock
940 searching for changes
941 remote: adding changesets
942 remote: adding manifests
943 remote: adding file changes
944 remote: added 1 changesets with 1 changes to 1 files
General Comments 0
You need to be logged in to leave comments. Login now