##// END OF EJS Templates
push: stop independent usage of bundle2 in syncphase (issue4454)...
Pierre-Yves David -
r23376:2e65da5f stable
parent child Browse files
Show More
@@ -1,1270 +1,1234 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler for the bundle data readable from ``fh``.

    ``fname`` is used for error reporting and defaults to "stream" when
    empty; ``vfs``, when provided, resolves ``fname`` to a full path.
    Raises util.Abort for non-Mercurial data or unknown bundle versions.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # a headerless stream starting with NUL bytes is assumed to be an
        # uncompressed HG10 bundle; push the peeked bytes back first
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[:2]
    version = header[2:]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # compression algorithm follows the header on the wire
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version == '2Y':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
def buildobsmarkerspart(bundler, markers):
    """Append a B2X:OBSMARKERS part carrying ``markers`` to ``bundler``.

    Returns the newly created part, or None when ``markers`` is empty.
    Raises ValueError if no obsmarker format is shared with the bundler's
    advertised capabilities.
    """
    if not markers:
        return None
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler do not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('B2X:OBSMARKERS', data=stream)
54
54
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed (by name)
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, as (name, old-remote-id, new-id) tuples
        self.outbookmarks = []

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(self.outgoing.common)
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of (success, failure) messages used when pushing bookmarks,
    # keyed by the kind of bookmark operation
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
161
161
162
162
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
    # refuse to push to a local destination lacking required features
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        # remotes without unbundle (addchangegroup path) must be locked
        # by us for the duration of the push
        if not unbundle:
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            # bundle2 path is experimental and only taken when both sides
            # opted in; each _push* step below checks pushop.stepsdone so
            # work already done via bundle2 is not repeated
            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                          False)
                and pushop.remote.capable('bundle2-exp')):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    return pushop
230
230
# list of names of steps to perform discovery before push, order matters
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
238
238
def pushdiscovery(stepname):
    """decorator registering a pre-push discovery step under ``stepname``

    The decorated function is recorded in the step -> function mapping and
    its name appended to the ordered list of steps, so decoration order
    matters.

    Only use this decorator for brand new steps; to wrap an existing step
    from an extension, modify the pushdiscovery dictionary directly."""
    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
254
254
def _pushdiscovery(pushop):
    """Execute every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
260
260
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    unfi = pushop.repo.unfiltered()
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=pushop.force)
    # record discovery results on the operation object
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
274
274
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phases that need to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    # pheads/droots as returned by phases.analyzeremotephases for the
    # fallback (push-failed) head set
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # non-publishing remote: only consider locally-public changesets
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changesets we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
309
309
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed set"""
    repo = pushop.repo
    # same short-circuit order as before: local config, local store,
    # then the (potentially remote) namespace listing
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repos.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
320
320
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and fill pushop.outbookmarks"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to the pushed subset
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks explicitly requested by name; drained as each is matched
    explicit = set(pushop.bookmarks)

    comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
    # bookmarks that advanced locally
    for name, srcid, dstid in advsrc:
        explicit.discard(name)
        if not ancestors or repo[srcid].rev() in ancestors:
            pushop.outbookmarks.append((name, dstid, srcid))
    # bookmarks added locally
    for name, srcid, dstid in addsrc:
        explicit.discard(name)
        pushop.outbookmarks.append((name, '', srcid))
    # bookmarks to overwrite on the remote
    for name, srcid, dstid in advdst + diverge + differ:
        explicit.discard(name)
        pushop.outbookmarks.append((name, dstid, srcid))
    # bookmarks that only exist remotely: treat as "deleted locally"
    for name, srcid, dstid in adddst:
        explicit.discard(name)
        pushop.outbookmarks.append((name, dstid, ''))
    # identical bookmarks shouldn't get reported
    for name, srcid, dstid in same:
        explicit.discard(name)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
371
371
372 def _pushcheckoutgoing(pushop):
372 def _pushcheckoutgoing(pushop):
373 outgoing = pushop.outgoing
373 outgoing = pushop.outgoing
374 unfi = pushop.repo.unfiltered()
374 unfi = pushop.repo.unfiltered()
375 if not outgoing.missing:
375 if not outgoing.missing:
376 # nothing to push
376 # nothing to push
377 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
377 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
378 return False
378 return False
379 # something to push
379 # something to push
380 if not pushop.force:
380 if not pushop.force:
381 # if repo.obsstore == False --> no obsolete
381 # if repo.obsstore == False --> no obsolete
382 # then, save the iteration
382 # then, save the iteration
383 if unfi.obsstore:
383 if unfi.obsstore:
384 # this message are here for 80 char limit reason
384 # this message are here for 80 char limit reason
385 mso = _("push includes obsolete changeset: %s!")
385 mso = _("push includes obsolete changeset: %s!")
386 mst = {"unstable": _("push includes unstable changeset: %s!"),
386 mst = {"unstable": _("push includes unstable changeset: %s!"),
387 "bumped": _("push includes bumped changeset: %s!"),
387 "bumped": _("push includes bumped changeset: %s!"),
388 "divergent": _("push includes divergent changeset: %s!")}
388 "divergent": _("push includes divergent changeset: %s!")}
389 # If we are to push if there is at least one
389 # If we are to push if there is at least one
390 # obsolete or unstable changeset in missing, at
390 # obsolete or unstable changeset in missing, at
391 # least one of the missinghead will be obsolete or
391 # least one of the missinghead will be obsolete or
392 # unstable. So checking heads only is ok
392 # unstable. So checking heads only is ok
393 for node in outgoing.missingheads:
393 for node in outgoing.missingheads:
394 ctx = unfi[node]
394 ctx = unfi[node]
395 if ctx.obsolete():
395 if ctx.obsolete():
396 raise util.Abort(mso % ctx)
396 raise util.Abort(mso % ctx)
397 elif ctx.troubled():
397 elif ctx.troubled():
398 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
398 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
399 newbm = pushop.ui.configlist('bookmarks', 'pushing')
399 newbm = pushop.ui.configlist('bookmarks', 'pushing')
400 discovery.checkheads(unfi, pushop.remote, outgoing,
400 discovery.checkheads(unfi, pushop.remote, outgoing,
401 pushop.remoteheads,
401 pushop.remoteheads,
402 pushop.newbranch,
402 pushop.newbranch,
403 bool(pushop.incoming),
403 bool(pushop.incoming),
404 newbm)
404 newbm)
405 return True
405 return True
406
406
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
414
414
def b2partsgenerator(stepname):
    """decorator registering a bundle2 part generator under ``stepname``

    The decorated function is recorded in the step -> function mapping and
    its name appended to the ordered list of steps, so decoration order
    matters.

    Only use this decorator for brand new steps; to wrap an existing step
    from an extension, modify the b2partsgenmapping dictionary directly."""
    def register(generator):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = generator
        b2partsgenorder.append(stepname)
        return generator
    return register
430
430
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        # nothing outgoing: no changegroup part to generate
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo, pushop.remote,
                                     pushop.outgoing)
    # Send known heads to the server for race detection.
    if not pushop.force:
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    changes = changegroup.getlocalchangegroup(pushop.repo, 'push',
                                              pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=changes.getchunks())
    def handlereply(op):
        """extract addchangroup returns from server reply"""
        replies = op.records.getreplies(cgpart.id)
        assert len(replies['changegroup']) == 1
        pushop.cgresult = replies['changegroup'][0]['return']
    return handlereply
456
456
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    One ``b2x:pushkey`` part is added per head that must be turned public on
    the remote. The returned handler reads the per-part replies and warns
    about heads the server ignored or failed to update.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not 'b2x:pushkey' in b2caps:
        # server cannot handle pushkey parts; leave 'phases' out of
        # stepsdone so the plain pushkey fallback runs later
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        # remember which head each part id stands for, for reply handling
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        # warn for every head whose phase bump was ignored or rejected
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
488
488
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence markers part to the bundle2 push (if possible)

    No reply handler is returned: marker transfer needs no per-part reply
    processing.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no obsmarkers format is understood by both sides; leave the step
        # undone so the pushkey-based fallback can run later
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, pushop.outobsmarkers)
499
499
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    One ``b2x:pushkey`` part is added per outgoing bookmark change. The
    returned handler reads the per-part replies and reports success or
    failure for each bookmark.
    """
    # (was mislabelled "handle phase push" — copy-paste from _pushb2phases)
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'b2x:pushkey' not in b2caps:
        # server cannot handle pushkey parts; leave 'bookmarks' out of
        # stepsdone so the plain pushkey fallback runs later
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty "old" means creation, empty "new" means deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))


    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                # use the local ``ui`` consistently (was ``pushop.ui.warn``)
                ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
542
542
543
543
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
    bundler.newpart('b2x:replycaps', data=capsblob)
    replyhandlers = []
    # run each registered part generator in order; a generator may return a
    # callable that will process the server's reply for its part(s)
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    # (the bundle always contains at least the replycaps part)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    try:
        op = bundle2.processbundle(pushop.repo, reply)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    # let each generator inspect the replies to its own parts
    for rephand in replyhandlers:
        rephand(op)
573
573
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Non-bundle2 code path: builds a changegroup locally and sends it with
    either the ``unbundle`` or the older ``addchangegroup`` wire command.
    The remote's return value is stored in ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        # changesets already sent (e.g. through bundle2)
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                            or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
622
622
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Pulls the remote's phase data, moves local phases to match, then pushes
    outstanding local phase changes to the remote with plain pushkey calls
    (bundle2 pushes handle phases in _pushb2phases and mark the step done).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed through bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
714
678
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.locallocked:
        # Without the local lock we must not touch phases. Only tell the
        # user that a move would have happened, if any node actually needs
        # one.
        wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
        if wouldmove:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n')
                             % phases.phasenames[phase])
        return
    # We hold the lock: perform the move inside its own transaction.
    tr = pushop.repo.transaction('push-phase-sync')
    try:
        phases.advanceboundary(pushop.repo, tr, phase, nodes)
        tr.close()
    finally:
        tr.release()
733
697
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
752
716
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for book, old, new in pushop.outbookmarks:
        # empty "old" means creation, empty "new" means deletion
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        ok = remote.pushkey('bookmarks', book, old, new)
        if ok:
            ui.status(bookmsgmap[action][0] % book)
        else:
            ui.warn(bookmsgmap[action][1] % book)
    # discovery may have set the value from an invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
774
738
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=()):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
            self._tr.hookargs['source'] = 'pull'
            self._tr.hookargs['url'] = self.remote.url()
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            repo = self.repo
            cl = repo.unfiltered().changelog
            # pass the root as "pending" so pre-close hooks can see the
            # not-yet-committed changelog data
            # (a duplicated copy of this assignment was removed)
            p = cl.writepending() and repo.root or ""
            repo.hook('b2x-pretransactionclose', throw=True, pending=p,
                      **self._tr.hookargs)
            self._tr.close()
            # snapshot hookargs before the transaction object goes away;
            # the post-close hook runs after the lock is released
            hookargs = dict(self._tr.hookargs)
            def runhooks():
                repo.hook('b2x-transactionclose', **hookargs)
            repo._afterlock(runhooks)

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
857
821
def pull(repo, remote, heads=None, force=False, bookmarks=()):
    """pull changesets, phases, bookmarks and obsmarkers from *remote*

    Runs discovery, then either the bundle2 pull (when enabled and supported
    by the remote) or the step-by-step legacy pull; each step checks
    ``pullop.stepsdone`` so work done through bundle2 is not repeated.
    Returns the ``pulloperation`` object (its ``cgresult`` carries the
    changegroup return code).
    """
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    pullop.remotebookmarks = remote.listkeys('bookmarks')
    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop
885
849
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
893
857
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The decorated function is registered under ``stepname`` in the
    step -> function mapping and ``stepname`` is appended to the ordered
    step list, so registration order is execution order (this may matter).

    Only use this decorator for a new step; to wrap an existing step from
    an extension, modify the pulldiscovery dictionary directly."""
    def register(stepfunc):
        # a step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = stepfunc
        pulldiscoveryorder.append(stepname)
        return stepfunc
    return register
909
873
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    # execute every registered step in registration order
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
915
879
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will eventually handle all
    discovery at some point."""
    # unpack (common, fetch, rheads) straight into the pull operation
    pullop.common, pullop.fetch, pullop.rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(),
        pullop.remote,
        heads=pullop.heads,
        force=pullop.force)
927
891
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    # only ask for listkeys parts when the remote advertises support for them
    if 'b2x:listkeys' in remotecaps:
        kwargs['listkeys'] = ['phase', 'bookmarks']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    # request obsolescence markers only when both sides share a marker version
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(remotecaps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    # extension hook point: may add or remove getbundle arguments
    _pullbundle2extraprepare(pullop, kwargs)
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        # fold the per-part changegroup return codes into a single result:
        # 0 = failure, 1 = no head change, 1+n / -1-n = heads added/removed
        changedheads = 0
        pullop.cgresult = 1
        for cg in op.records['changegroup']:
            ret = cg['return']
            # If any changegroup result is 0, return 0
            if ret == 0:
                pullop.cgresult = 0
                break
            if ret < -1:
                changedheads += ret + 1
            elif ret > 1:
                changedheads += ret - 1
        if changedheads > 0:
            pullop.cgresult = 1 + changedheads
        elif changedheads < 0:
            pullop.cgresult = -1 + changedheads

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value
            _pullbookmarks(pullop)
990
954
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    Extensions may mutate ``kwargs`` in place before the remote getbundle
    request is issued; the default implementation does nothing.
    """
994
958
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the most capable wire-protocol command the remote supports,
    # falling back from getbundle to the legacy changegroup commands
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
1028
992
def _pullphase(pullop):
    """fetch phase information from the remote and apply it locally

    Skipped entirely when a previous step (e.g. bundle2) already
    handled phases.
    """
    if 'phases' not in pullop.stepsdone:
        _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1035
999
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and unpublishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    # bind hot lookups to locals for the filtering comprehensions below
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        # transaction is only opened if there is actual phase movement
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1070
1034
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    localrepo = pullop.repo
    # delegate the actual merge of remote bookmark state to the bookmark
    # module; explicit bookmarks requested on the command line get
    # special treatment there
    bookmod.updatefromremote(localrepo.ui, localrepo,
                             pullop.remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1082
1046
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # 'dump0' is the first chunk of base85-encoded marker data; its
        # presence means the remote actually has markers to transfer
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr
1106
1070
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # advertise bundle2 support plus this repo's url-quoted capability blob
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return set(['HG2Y', 'bundle2=' + urllib.quote(capsblob)])
1113
1077
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
# (both structures are populated by the getbundle2partsgenerator decorator)
getbundle2partsmapping = {}
1121
1085
def getbundle2partsgenerator(stepname):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(partfunc):
        # refuse duplicate registrations for the same step name
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = partfunc
        getbundle2partsorder.append(stepname)
        return partfunc
    return register
1137
1101
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2Y depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    changes when more part type will be available for bundle2.

    This is different from changegroup.getchangegroup that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we what to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # bundle10 case
    if bundlecaps is None or 'HG2Y' not in bundlecaps:
        # bundle10 can only carry a changegroup; any other request is an error
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return changegroup.getchangegroup(repo, source, heads=heads,
                                          common=common, bundlecaps=bundlecaps)

    # bundle20 case
    # decode the client's url-quoted bundle2 capability blob, if any
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    # run every registered part generator, in registration order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        kwargs['heads'] = heads
        kwargs['common'] = common
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **kwargs)

    return util.chunkbuffer(bundler.getchunks())
1180
1144
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True):
        # caller explicitly opted out of a changegroup part
        return
    # build changegroup bundle here.
    cg = changegroup.getchangegroup(repo, source, heads=heads,
                                    common=common, bundlecaps=bundlecaps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
1193
1157
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one part per requested pushkey namespace (e.g. 'phase', 'bookmarks')
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1204
1168
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # restrict to markers relevant to the changesets being bundled
    subset = [ctx.node() for ctx in repo.set('::%ln', heads)]
    buildobsmarkerspart(bundler, repo.obsstore.relevantmarkers(subset))
1215
1179
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    currentheads = repo.heads()
    headsdigest = util.sha1(''.join(sorted(currentheads))).digest()
    # the client may send the literal heads, a hash of them, or 'force'
    if (their_heads != ['force'] and their_heads != currentheads
        and their_heads != ['hashed', headsdigest]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1229
1193
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        # bundle2 streams expose a 'params' attribute; legacy changegroups
        # do not, which is how the two formats are told apart here
        if util.safehasattr(cg, 'params'):
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['source'] = source
                tr.hookargs['url'] = url
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                cl = repo.unfiltered().changelog
                # expose pending changes to the pre-close hook when present
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, pending=p,
                          **tr.hookargs)
                tr.close()
                # snapshot hookargs before the transaction is released so the
                # post-close hook (run after the lock drops) still sees them
                hookargs = dict(tr.hookargs)
                def runhooks():
                    repo.hook('b2x-transactionclose', **hookargs)
                repo._afterlock(runhooks)
            except Exception, exc:
                # tag the exception so callers know it happened mid-bundle2
                exc.duringunbundle2 = True
                raise
        else:
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        # tr.release() is a no-op if tr.close() already ran
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now