##// END OF EJS Templates
exchange: catch down to BaseException when handling bundle2...
Pierre-Yves David -
r25182:ee665d3b default
parent child Browse files
Show More
@@ -1,1334 +1,1334 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13 import lock as lockmod
13 import lock as lockmod
14
14
def readbundle(ui, fh, fname, vfs=None):
    """Sniff a bundle's 4-byte header and return the matching unpacker.

    ``fh`` is the open stream, ``fname`` a display name for error messages
    (``"stream"`` when absent).  Returns a ``changegroup.cg1unpacker`` for
    HG10 bundles or a ``bundle2.getunbundler`` for HG2x bundles.  Raises
    ``util.Abort`` on a non-Mercurial or unknown-version header.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A stream beginning with NUL is a headerless (raw, uncompressed)
        # changegroup; push the bytes back and treat it as HG10/UN.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # HG10 carries a 2-byte compression tag after the magic
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, header=magic + version)
    else:
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
40
40
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker
    format.
    """
    if markers:
        # negotiate an obsmarker wire format both sides understand
        remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
        version = obsolete.commonversion(remoteversions)
        if version is None:
            raise ValueError('bundler do not support common obsmarker format')
        stream = obsolete.encodemarkers(markers, True, version=version)
        return bundler.newpart('obsmarkers', data=stream)
    return None
55
55
56 def _canusebundle2(op):
56 def _canusebundle2(op):
57 """return true if a pull/push can use bundle2
57 """return true if a pull/push can use bundle2
58
58
59 Feel free to nuke this function when we drop the experimental option"""
59 Feel free to nuke this function when we drop the experimental option"""
60 return (op.repo.ui.configbool('experimental', 'bundle2-exp', False)
60 return (op.repo.ui.configbool('experimental', 'bundle2-exp', False)
61 and op.remote.capable('bundle2'))
61 and op.remote.capable('bundle2'))
62
62
63
63
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through
        # bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(self.outgoing.common)
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads
163
163
# mapping of message used when pushing bookmark:
# action name -> (success message template, failure message template)
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }
172
172
173
173
174 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
174 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
175 '''Push outgoing changesets (limited by revs) from a local
175 '''Push outgoing changesets (limited by revs) from a local
176 repository to remote. Return an integer:
176 repository to remote. Return an integer:
177 - None means nothing to push
177 - None means nothing to push
178 - 0 means HTTP error
178 - 0 means HTTP error
179 - 1 means we pushed and remote head count is unchanged *or*
179 - 1 means we pushed and remote head count is unchanged *or*
180 we have outgoing changesets but refused to push
180 we have outgoing changesets but refused to push
181 - other values as described by addchangegroup()
181 - other values as described by addchangegroup()
182 '''
182 '''
183 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
183 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
184 if pushop.remote.local():
184 if pushop.remote.local():
185 missing = (set(pushop.repo.requirements)
185 missing = (set(pushop.repo.requirements)
186 - pushop.remote.local().supported)
186 - pushop.remote.local().supported)
187 if missing:
187 if missing:
188 msg = _("required features are not"
188 msg = _("required features are not"
189 " supported in the destination:"
189 " supported in the destination:"
190 " %s") % (', '.join(sorted(missing)))
190 " %s") % (', '.join(sorted(missing)))
191 raise util.Abort(msg)
191 raise util.Abort(msg)
192
192
193 # there are two ways to push to remote repo:
193 # there are two ways to push to remote repo:
194 #
194 #
195 # addchangegroup assumes local user can lock remote
195 # addchangegroup assumes local user can lock remote
196 # repo (local filesystem, old ssh servers).
196 # repo (local filesystem, old ssh servers).
197 #
197 #
198 # unbundle assumes local user cannot lock remote repo (new ssh
198 # unbundle assumes local user cannot lock remote repo (new ssh
199 # servers, http servers).
199 # servers, http servers).
200
200
201 if not pushop.remote.canpush():
201 if not pushop.remote.canpush():
202 raise util.Abort(_("destination does not support push"))
202 raise util.Abort(_("destination does not support push"))
203 # get local lock as we might write phase data
203 # get local lock as we might write phase data
204 localwlock = locallock = None
204 localwlock = locallock = None
205 try:
205 try:
206 # bundle2 push may receive a reply bundle touching bookmarks or other
206 # bundle2 push may receive a reply bundle touching bookmarks or other
207 # things requiring the wlock. Take it now to ensure proper ordering.
207 # things requiring the wlock. Take it now to ensure proper ordering.
208 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
208 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
209 if _canusebundle2(pushop) and maypushback:
209 if _canusebundle2(pushop) and maypushback:
210 localwlock = pushop.repo.wlock()
210 localwlock = pushop.repo.wlock()
211 locallock = pushop.repo.lock()
211 locallock = pushop.repo.lock()
212 pushop.locallocked = True
212 pushop.locallocked = True
213 except IOError, err:
213 except IOError, err:
214 pushop.locallocked = False
214 pushop.locallocked = False
215 if err.errno != errno.EACCES:
215 if err.errno != errno.EACCES:
216 raise
216 raise
217 # source repo cannot be locked.
217 # source repo cannot be locked.
218 # We do not abort the push, but just disable the local phase
218 # We do not abort the push, but just disable the local phase
219 # synchronisation.
219 # synchronisation.
220 msg = 'cannot lock source repository: %s\n' % err
220 msg = 'cannot lock source repository: %s\n' % err
221 pushop.ui.debug(msg)
221 pushop.ui.debug(msg)
222 try:
222 try:
223 if pushop.locallocked:
223 if pushop.locallocked:
224 pushop.trmanager = transactionmanager(repo,
224 pushop.trmanager = transactionmanager(repo,
225 'push-response',
225 'push-response',
226 pushop.remote.url())
226 pushop.remote.url())
227 pushop.repo.checkpush(pushop)
227 pushop.repo.checkpush(pushop)
228 lock = None
228 lock = None
229 unbundle = pushop.remote.capable('unbundle')
229 unbundle = pushop.remote.capable('unbundle')
230 if not unbundle:
230 if not unbundle:
231 lock = pushop.remote.lock()
231 lock = pushop.remote.lock()
232 try:
232 try:
233 _pushdiscovery(pushop)
233 _pushdiscovery(pushop)
234 if _canusebundle2(pushop):
234 if _canusebundle2(pushop):
235 _pushbundle2(pushop)
235 _pushbundle2(pushop)
236 _pushchangeset(pushop)
236 _pushchangeset(pushop)
237 _pushsyncphase(pushop)
237 _pushsyncphase(pushop)
238 _pushobsolete(pushop)
238 _pushobsolete(pushop)
239 _pushbookmark(pushop)
239 _pushbookmark(pushop)
240 finally:
240 finally:
241 if lock is not None:
241 if lock is not None:
242 lock.release()
242 lock.release()
243 if pushop.trmanager:
243 if pushop.trmanager:
244 pushop.trmanager.close()
244 pushop.trmanager.close()
245 finally:
245 finally:
246 if pushop.trmanager:
246 if pushop.trmanager:
247 pushop.trmanager.release()
247 pushop.trmanager.release()
248 if locallock is not None:
248 if locallock is not None:
249 locallock.release()
249 locallock.release()
250 if localwlock is not None:
250 if localwlock is not None:
251 localwlock.release()
251 localwlock.release()
252
252
253 return pushop
253 return pushop
254
254
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def dec(func):
        # each step name may only be registered once
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return dec
278
278
def _pushdiscovery(pushop):
    """Run all discovery steps registered in ``pushdiscoveryorder``."""
    for stepname in pushdiscoveryorder:
        step = pushdiscoverymapping[stepname]
        step(pushop)
284
284
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    fci = discovery.findcommonincoming
    commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
297
297
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
332
332
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect obsolescence markers relevant to the pushed changesets"""
    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
        and pushop.repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        repo = pushop.repo
        # very naive computation, that can be quite expensive on big repo.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
343
343
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and fill ``pushop.outbookmarks``

    Entries are ``(name, old-remote-node, new-remote-node)`` triples; an
    empty old node means "add", an empty new node means "delete".  Sets
    ``pushop.bkresult = 2`` when an explicitly requested bookmark exists
    on neither side.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to ancestors of the pushed revs
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set(pushop.bookmarks)

    comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in advdst + diverge + differ:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
394
394
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets; return False when nothing to push

    Aborts when (without ``--force``) the push would publish obsolete or
    troubled changesets, or would create new remote heads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(mst[ctx.troubles()[0]] % ctx)
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
429
429
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def dec(func):
        # each step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            # explicit position requested by the caller
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
456
456
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    Returns a reply handler extracting that result, or None when there is
    nothing to push.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        # nothing outgoing to push (see _pushcheckoutgoing)
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    if not pushop.force:
        # ask the server to fail the whole bundle if its heads changed
        # since our discovery (push race detection)
        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = None
    cgversions = b2caps.get('changegroup')
    if not cgversions:  # 3.1 and 3.2 ship with an empty value
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing)
    else:
        # negotiate the highest changegroup version both sides understand
        cgversions = [v for v in cgversions if v in changegroup.packermap]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing,
                                                version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if version is not None:
        cgpart.addparam('version', version)
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
497
497
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Adds one 'pushkey' part per remote head that must be turned public and
    returns a reply handler warning about any head the server ignored or
    failed to update. Does nothing when the remote lacks 'pushkey' support
    or when phases were already pushed.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        # remember which part moves which node for reply processing
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            # each pushkey part gets at most one reply record
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
529
529
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """handle obsolescence marker push through bundle2

    Adds an obsmarkers part when there are outgoing markers and the remote
    advertises a marker format version we have in common."""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no obsmarkers format in common with the remote
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        buildobsmarkerspart(bundler, markers)
541
541
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Adds one 'pushkey' part per outgoing bookmark and returns a reply
    handler reporting the outcome of each update."""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # classify the change to pick the right status/error message later
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))


    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            # each pushkey part gets at most one reply record
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
584
584
585
585
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback: let the server send data back through our reply bundle
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        # each generator may add parts and return a callable processing the
        # server reply for those parts
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    # (only the mandatory 'replycaps' part was added)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except error.BundleValueError, exc:
        # remote rejected a mandatory part/parameter it does not understand
        raise util.Abort('missing support for %s' % exc)
    try:
        trgetter = None
        if pushback:
            trgetter = pushop.trmanager.transaction
        op = bundle2.processbundle(pushop.repo, reply, trgetter)
    except error.BundleValueError, exc:
        # the reply bundle contains something *we* do not understand
        raise util.Abort('missing support for %s' % exc)
    for rephand in replyhandlers:
        rephand(op)
622
622
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path: sends a changegroup via the remote's
    ``unbundle`` or ``addchangegroup`` command and stores the result in
    ``pushop.cgresult``."""
    if 'changesets' in pushop.stepsdone:
        # changesets were already sent (e.g. through bundle2)
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
671
671
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Runs even when nothing was pushed: remote phase data is applied locally
    and, when changesets were pushed, outdated remote heads are advanced to
    public through the pushkey protocol."""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # remote publishes everything: common heads become public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
727
727
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo

    Defaults to moving nodes to public. Requires an active transaction
    manager; without one the phases are left untouched and the user is
    informed."""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
744
744
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Legacy (non-bundle2) path: markers are escaped into pushkey payloads
    and sent one key at a time. A warning is emitted if any key fails."""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            # at least one pushkey call returned a falsy result
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
763
763
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Legacy (non-bundle2) path: one pushkey call per outgoing bookmark.
    Skipped when the changeset push failed or bundle2 already handled
    bookmarks."""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # classify the change to pick the right status/error message
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value from an invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
785
785
class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=()):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # transaction manager (set by pull(), None until then)
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
842
842
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    The underlying repository transaction is created lazily, on first
    request, and the appropriate hooks are called when the transaction is
    closed."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily created repository transaction (None until requested)
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if self._tr:
            return self._tr
        trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
        tr = self.repo.transaction(trname)
        tr.hookargs['source'] = self.source
        tr.hookargs['url'] = self.url
        self._tr = tr
        return tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
872
872
def pull(repo, remote, heads=None, force=False, bookmarks=()):
    """pull changesets, phases, bookmarks and obsmarkers from a remote

    Returns the ``pulloperation`` object used for the pull. Raises
    util.Abort when the local destination lacks features the (local)
    remote requires."""
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    pullop.remotebookmarks = remote.listkeys('bookmarks')
    lock = pullop.repo.lock()
    try:
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        _pulldiscovery(pullop)
        if _canusebundle2(pullop):
            _pullbundle2(pullop)
        # the steps below record what they did in pullop.stepsdone, so the
        # bundle2 path above prevents redundant work here
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        # release is a no-op on a closed transaction; this aborts on error
        pullop.trmanager.release()
        lock.release()

    return pullop
900
900
# list of steps to perform discovery before pull
# (populated by the @pulldiscovery decorator below)
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
908
908
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    Registers the decorated function under ``stepname`` in the step ->
    function mapping and appends the step to the ordered step list. Beware
    that decorated functions will be added in order (this may matter).

    You can only use this decorator for a new step; if you want to wrap a
    step from an extension, change the pulldiscovery dictionary directly."""
    def register(func):
        # each step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
924
924
def _pulldiscovery(pullop):
    """Run all registered discovery steps, in registration order"""
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
930
930
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """Discovery phase for the pull.

    Currently handles changeset discovery only; will change to handle all
    discovery at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, drop it from the unknown
        # remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but
        # locally hidden" situation. We do not perform discovery on the
        # unfiltered repository because it ends up doing a pathological
        # amount of round trips for a huge amount of changesets we do not
        # care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not including a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
968
968
def _pullbundle2(pullop):
    """Pull data using bundle2.

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in remotecaps:
        kwargs['listkeys'] = ['phase', 'bookmarks']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(remotecaps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError as exc:
        # The remote sent a part we cannot handle; surface it as an abort.
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        results = [cg['return'] for cg in op.records['changegroup']]
        pullop.cgresult = changegroup.combineresults(results)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value
            _pullbookmarks(pullop)
1015
1015
def _pullbundle2extraprepare(pullop, kwargs):
    """Hook function so that extensions can extend the getbundle call."""
    pass
1019
1019
def _pullchangeset(pullop):
    """Pull changesets from unbundle into the local repo."""
    # We delay the open of the transaction as late as possible so we
    # don't open a transaction for nothing, or break a future useful
    # rollback call.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
1053
1053
def _pullphase(pullop):
    """Get remote phases data and apply it locally (skipped if already done)."""
    if 'phases' in pullop.stepsdone:
        return
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)
1060
1060
def _pullapplyphases(pullop, remotephases):
    """Apply phase movement from observed remote state."""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing; all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1095
1095
def _pullbookmarks(pullop):
    """Process the remote bookmark information to update the local one."""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = pullop.remotebookmarks
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1107
1107
def _pullobsolete(pullop):
    """Utility function to pull obsolete markers from a remote.

    The `gettransaction` is a function that returns the pull transaction,
    creating one if necessary. We return the transaction to inform the
    calling code that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes."""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr
1131
1131
def caps20to10(repo):
    """Return a set with appropriate options to use bundle20 during getbundle."""
    caps = set(['HG20'])
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    # URL-quote the blob so it survives transport as a capability string.
    caps.add('bundle2=' + urllib.quote(capsblob))
    return caps
1138
1138
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1146
1146
def getbundle2partsgenerator(stepname, idx=None):
    """Decorator for a function generating a bundle2 part for getbundle.

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def dec(func):
        # Refuse to silently overwrite an already-registered step.
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec
1165
1165
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """Return a full bundle (with potentially multiple kinds of parts).

    Could be a bundle HG10 or a bundle HG20 depending on the bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types become available for bundle2.

    This is different from changegroup.getchangegroup, which only returns an
    HG10 changegroup bundle. They may eventually get reunited in the future
    when we have a clearer idea of the API we want to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # bundle10 case
    usebundle2 = False
    if bundlecaps is not None:
        usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps))
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return changegroup.getchangegroup(repo, source, heads=heads,
                                          common=common, bundlecaps=bundlecaps)

    # bundle20 case
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **kwargs)

    return util.chunkbuffer(bundler.getchunks())
1212
1212
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """Add a changegroup part to the requested bundle."""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = None
        cgversions = b2caps.get('changegroup')
        if not cgversions:  # 3.1 and 3.2 ship with an empty value
            cg = changegroup.getchangegroupraw(repo, source, heads=heads,
                                               common=common,
                                               bundlecaps=bundlecaps)
        else:
            cgversions = [v for v in cgversions if v in changegroup.packermap]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
            cg = changegroup.getchangegroupraw(repo, source, heads=heads,
                                               common=common,
                                               bundlecaps=bundlecaps,
                                               version=version)

    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if version is not None:
            part.addparam('version', version)
1240
1240
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """Add parts containing listkeys namespaces to the requested bundle."""
    listkeys = kwargs.get('listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
1251
1251
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """Add an obsolescence markers part to the requested bundle."""
    if kwargs.get('obsmarkers', False):
        if heads is None:
            heads = repo.heads()
        subset = [c.node() for c in repo.set('::%ln', heads)]
        markers = repo.obsstore.relevantmarkers(subset)
        markers = sorted(markers)
        buildobsmarkerspart(bundler, markers)
1263
1263
def check_heads(repo, their_heads, context):
    """Check if the heads of a repo have been modified.

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1277
1277
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    wlock = lock = tr = None
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
                                       False)
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            r = None
            try:
                wlock = repo.wlock()
                lock = repo.lock()
                tr = repo.transaction(source)
                tr.hookargs['source'] = source
                tr.hookargs['url'] = url
                tr.hookargs['bundle2'] = '1'
                op = bundle2.bundleoperation(repo, lambda: tr,
                                             captureoutput=captureoutput)
                try:
                    r = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                tr.close()
            # Catch down to BaseException so that even KeyboardInterrupt and
            # SystemExit get tagged with bundle2 context before re-raising.
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
        else:
            lock = repo.lock()
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        lockmod.release(tr, lock, wlock)
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
General Comments 0
You need to be logged in to leave comments. Login now