##// END OF EJS Templates
push: prepare the issue of multiple kinds of messages...
Pierre-Yves David -
r22650:36952c91 default
parent child Browse files
Show More
@@ -1,1143 +1,1170 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle read from ``fh``.

    ``fname`` is only used for error messages; when empty, the data is
    assumed to come from a stream.  ``vfs``, when provided, is used to
    resolve ``fname`` to a full path.
    """
    hdr = changegroup.readexactly(fh, 4)

    compression = None
    if fname:
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = "stream"
        if not hdr.startswith('HG') and hdr.startswith('\0'):
            # raw, headerless changegroup data: re-attach the bytes we
            # already consumed and treat it as an uncompressed HG10 bundle
            fh = changegroup.headerlessfixup(fh, hdr)
            hdr = "HG10"
            compression = 'UN'

    magic = hdr[0:2]
    version = hdr[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if compression is None:
            compression = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compression)
    if version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if not markers:
        return None
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler do not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('B2X:OBSMARKERS', data=stream)
54
54
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new pushoperation should be created at the beginning of each push
    and discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed (iterable of names)
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common heads are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(self.outgoing.common)
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads
152
152
# Mapping of the message pairs (success, failure) used when pushing a
# bookmark, keyed by the kind of bookmark operation performed.
bookmsgmap = {
    'update': (_("updating bookmark %s\n"),
               _('updating bookmark %s failed!\n')),
    'export': (_("exporting bookmark %s\n"),
               _('exporting bookmark %s failed!\n')),
    'delete': (_("deleting remote bookmark %s\n"),
               _('deleting remote bookmark %s failed!\n')),
}
161
162
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return a pushoperation object whose ``cgresult``
    is an integer:
    - None means nothing to push
    - 0 means HTTP error
    - 1 means we pushed and remote head count is unchanged *or*
      we have outgoing changesets but refused to push
    - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
    if pushop.remote.local():
        # local-to-local push: the destination must understand every
        # repository requirement of the source
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # remote cannot receive a bundle stream; we must lock it and
            # feed it through addchangegroup instead
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                          False)
                and pushop.remote.capable('bundle2-exp')):
                _pushbundle2(pushop)
            # the steps below are no-ops when bundle2 already performed them
            # (tracked through pushop.stepsdone)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    # explicit bookmark pushes are performed outside of the locks
    if pushop.bookmarks:
        pushop.bkresult = bookmod.pushtoremote(repo.ui, repo, remote,
                                               pushop.bookmarks)

    return pushop
224
234
# list of steps to perform discovery before push, in execution order
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
232
242
def pushdiscovery(stepname):
    """decorator registering a function as a pre-push discovery step

    The decorated function is stored under ``stepname`` in the
    step -> function mapping, and the step name is appended to the list
    of steps.  Decoration order therefore defines execution order
    (beware that this may matter).

    Only use this decorator for new steps; to wrap a step from an
    extension, change the pushdiscovery dictionary directly."""
    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
248
258
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
254
264
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    unfi = pushop.repo.unfiltered()
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
268
278
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phases that need to be pushed

    (computed for both the success and the failure case of the changeset
    push; results are stored in ``pushop.outdatedphases`` and
    ``pushop.fallbackoutdatedphases`` respectively)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # on a non-publishing server, only already-public heads matter
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changesets we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
303
313
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed set"""
    if not obsolete._enabled:
        return
    if not pushop.repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    repo = pushop.repo
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
314
324
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """find the bookmarks that advanced locally and must be pushed"""
    repo = pushop.repo.unfiltered()
    pushop.ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark discovery to the pushed subset
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = pushop.remote.listkeys('bookmarks')

    comparison = bookmod.compare(repo, repo._bookmarks, remotebookmark,
                                 srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comparison
    for book, scid, dcid in advsrc:
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((book, dcid, scid))
332
342
def _pushcheckoutgoing(pushop):
    """check that the outgoing changesets are safe to push

    Returns False when there is nothing to push.  Unless the push is
    forced, aborts when the push would propagate obsolete or troubled
    changesets, or would create unwanted new remote heads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(mst[ctx.troubles()[0]] % ctx)
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
367
377
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
375
385
def b2partsgenerator(stepname):
    """decorator registering a function as a bundle2 part generator

    The decorated function is stored under ``stepname`` in the
    step -> function mapping, and the step name is appended to the list
    of steps.  Decoration order therefore defines part-generation order
    (this may matter).

    Only use this decorator for new steps; to wrap a step from an
    extension, change the b2partsgenmapping dictionary directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        b2partsgenorder.append(stepname)
        return func
    return register
391
401
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    The addchangegroup result is stored in the ``pushop.cgresult``
    attribute by the returned reply handler.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    if not pushop.force:
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    cg = changegroup.getlocalchangegroup(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    def handlereply(op):
        """extract addchangegroup return from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
417
427
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Adds one ``b2x:pushkey`` part per outdated remote head that must be
    turned public, and returns a reply handler that warns about any head
    the server ignored or failed to update.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # use the same membership-test style as the other b2 parts generators
    if 'b2x:pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        """warn about each phase update the server refused or ignored"""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
449
459
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add a bundle2 part carrying the outgoing obsolescence markers"""
    if 'obsmarkers' in pushop.stepsdone:
        # an earlier step already took care of obsolescence markers
        return
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supported) is None:
        # no marker format in common with the remote; skip silently
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, pushop.outobsmarkers)
460
470
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Adds one b2x:pushkey part per outgoing bookmark change and returns a
    reply handler that reports the outcome with an action-specific message
    ('export' for a new bookmark, 'delete' for a removed one, 'update'
    otherwise, looked up in bookmsgmap).
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'b2x:pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # classify the change so the reply handler can pick the right message
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))


    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    # discovery can have set the value from an invalid entry
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
495
513
496
514
497 def _pushbundle2(pushop):
515 def _pushbundle2(pushop):
498 """push data to the remote using bundle2
516 """push data to the remote using bundle2
499
517
500 The only currently supported type of data is changegroup but this will
518 The only currently supported type of data is changegroup but this will
501 evolve in the future."""
519 evolve in the future."""
502 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
520 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
503 # create reply capability
521 # create reply capability
504 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
522 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
505 bundler.newpart('b2x:replycaps', data=capsblob)
523 bundler.newpart('b2x:replycaps', data=capsblob)
506 replyhandlers = []
524 replyhandlers = []
507 for partgenname in b2partsgenorder:
525 for partgenname in b2partsgenorder:
508 partgen = b2partsgenmapping[partgenname]
526 partgen = b2partsgenmapping[partgenname]
509 ret = partgen(pushop, bundler)
527 ret = partgen(pushop, bundler)
510 if callable(ret):
528 if callable(ret):
511 replyhandlers.append(ret)
529 replyhandlers.append(ret)
512 # do not push if nothing to push
530 # do not push if nothing to push
513 if bundler.nbparts <= 1:
531 if bundler.nbparts <= 1:
514 return
532 return
515 stream = util.chunkbuffer(bundler.getchunks())
533 stream = util.chunkbuffer(bundler.getchunks())
516 try:
534 try:
517 reply = pushop.remote.unbundle(stream, ['force'], 'push')
535 reply = pushop.remote.unbundle(stream, ['force'], 'push')
518 except error.BundleValueError, exc:
536 except error.BundleValueError, exc:
519 raise util.Abort('missing support for %s' % exc)
537 raise util.Abort('missing support for %s' % exc)
520 try:
538 try:
521 op = bundle2.processbundle(pushop.repo, reply)
539 op = bundle2.processbundle(pushop.repo, reply)
522 except error.BundleValueError, exc:
540 except error.BundleValueError, exc:
523 raise util.Abort('missing support for %s' % exc)
541 raise util.Abort('missing support for %s' % exc)
524 for rephand in replyhandlers:
542 for rephand in replyhandlers:
525 rephand(op)
543 rephand(op)
526
544
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # build the changegroup to send
    fastpathok = (pushop.revs is None
                  and not outgoing.excluded
                  and not pushop.repo.changelog.filteredrevs)
    if fastpathok:
        # pushing everything: use the fast path, no race possible on push
        packer = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo, outgoing, packer, 'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # ship the changegroup to the remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        remoteheads = ['force'] if pushop.force else pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # remote returns an integer indicating the remote head count change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
575
593
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # remote is publishing: everything common becomes public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            # remote public heads become public; the rest of the common
            # set is explicitly kept draft
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed through bundle2
                return
            outdated = pushop.outdatedphases
        else:
            # nothing was pushed: use the more conservative fallback set
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        b2caps = bundle2.bundle2caps(pushop.remote)
        if 'b2x:pushkey' in b2caps:
            # server supports bundle2, let's do a batched push through it
            #
            # This will eventually be unified with the changesets bundle2 push
            bundler = bundle2.bundle20(pushop.ui, b2caps)
            capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
            bundler.newpart('b2x:replycaps', data=capsblob)
            # one pushkey part per head to publish; remember which node each
            # part was for so failures can be reported precisely
            part2node = []
            enc = pushkey.encode
            for newremotehead in outdated:
                part = bundler.newpart('b2x:pushkey')
                part.addparam('namespace', enc('phases'))
                part.addparam('key', enc(newremotehead.hex()))
                part.addparam('old', enc(str(phases.draft)))
                part.addparam('new', enc(str(phases.public)))
                part2node.append((part.id, newremotehead))
            stream = util.chunkbuffer(bundler.getchunks())
            try:
                reply = pushop.remote.unbundle(stream, ['force'], 'push')
                op = bundle2.processbundle(pushop.repo, reply)
            except error.BundleValueError, exc:
                raise util.Abort('missing support for %s' % exc)
            for partid, node in part2node:
                partrep = op.records.getreplies(partid)
                results = partrep['pushkey']
                assert len(results) <= 1
                msg = None
                if not results:
                    msg = _('server ignored update of %s to public!\n') % node
                elif not int(results[0]['return']):
                    msg = _('updating %s to public failed!\n') % node
                if msg is not None:
                    pushop.ui.warn(msg)

        else:
            # fallback to independent pushkey command
            for newremotehead in outdated:
                r = pushop.remote.pushkey('phases',
                                          newremotehead.hex(),
                                          str(phases.draft),
                                          str(phases.public))
                if not r:
                    pushop.ui.warn(_('updating %s to public failed!\n')
                                   % newremotehead)
667
685
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.locallocked:
        # Without the local lock we must not touch any phase; just inform
        # the user whenever a move would actually have happened.
        repo = pushop.repo
        wouldmove = [n for n in nodes if phase < repo[n].phase()]
        if wouldmove:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n')
                             % phases.phasenames[phase])
        return
    tr = pushop.repo.transaction('push-phase-sync')
    try:
        phases.advanceboundary(pushop.repo, tr, phase, nodes)
        tr.close()
    finally:
        tr.release()
686
704
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
705
723
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        # changeset push failed, or bookmarks were already handled
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for book, old, new in pushop.outbookmarks:
        # classify the change to pick the matching message pair
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', book, old, new):
            ui.status(bookmsgmap[action][0] % book)
        else:
            ui.warn(bookmsgmap[action][1] % book)
            # discovery can have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
718
745
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new pulloperation should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created (see gettransaction)
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of steps remaining todo (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
785
812
def pull(repo, remote, heads=None, force=False, bookmarks=()):
    """pull changesets (and phase/obsmarker/bookmark data) from remote

    ``heads`` restricts the pull to those revisions (None means everything
    available).  ``bookmarks`` lists bookmark names to import explicitly,
    overriding any local bookmark of the same name.  Returns the
    changegroup result code (``pullop.cgresult``).
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # local-to-local pull: check the destination supports everything the
        # source requires before doing any work
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    remotebookmarks = remote.listkeys('bookmarks')
    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        # try bundle2 first when both sides opted into the experiment;
        # it removes from todosteps whatever it handled itself
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        # run every step bundle2 did not already take care of
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks, remote.url())
    # update specified bookmarks
    if bookmarks:
        marks = repo._bookmarks
        writer = repo.ui.status
        if repo.ui.configbool('ui', 'quietbookmarkmove', False):
            writer = repo.ui.debug
        for b in bookmarks:
            # explicit pull overrides local bookmark if any
            writer(_("importing bookmark %s\n") % b)
            marks[b] = repo[remotebookmarks[b]].node()
        marks.write()

    return pullop.cgresult
827
854
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will grow to handle all
    discovery at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote,
        heads=pullop.heads, force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
838
865
839 def _pullbundle2(pullop):
866 def _pullbundle2(pullop):
840 """pull data using bundle2
867 """pull data using bundle2
841
868
842 For now, the only supported data are changegroup."""
869 For now, the only supported data are changegroup."""
843 remotecaps = bundle2.bundle2caps(pullop.remote)
870 remotecaps = bundle2.bundle2caps(pullop.remote)
844 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
871 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
845 # pulling changegroup
872 # pulling changegroup
846 pullop.todosteps.remove('changegroup')
873 pullop.todosteps.remove('changegroup')
847
874
848 kwargs['common'] = pullop.common
875 kwargs['common'] = pullop.common
849 kwargs['heads'] = pullop.heads or pullop.rheads
876 kwargs['heads'] = pullop.heads or pullop.rheads
850 kwargs['cg'] = pullop.fetch
877 kwargs['cg'] = pullop.fetch
851 if 'b2x:listkeys' in remotecaps:
878 if 'b2x:listkeys' in remotecaps:
852 kwargs['listkeys'] = ['phase']
879 kwargs['listkeys'] = ['phase']
853 if not pullop.fetch:
880 if not pullop.fetch:
854 pullop.repo.ui.status(_("no changes found\n"))
881 pullop.repo.ui.status(_("no changes found\n"))
855 pullop.cgresult = 0
882 pullop.cgresult = 0
856 else:
883 else:
857 if pullop.heads is None and list(pullop.common) == [nullid]:
884 if pullop.heads is None and list(pullop.common) == [nullid]:
858 pullop.repo.ui.status(_("requesting all changes\n"))
885 pullop.repo.ui.status(_("requesting all changes\n"))
859 if obsolete._enabled:
886 if obsolete._enabled:
860 remoteversions = bundle2.obsmarkersversion(remotecaps)
887 remoteversions = bundle2.obsmarkersversion(remotecaps)
861 if obsolete.commonversion(remoteversions) is not None:
888 if obsolete.commonversion(remoteversions) is not None:
862 kwargs['obsmarkers'] = True
889 kwargs['obsmarkers'] = True
863 pullop.todosteps.remove('obsmarkers')
890 pullop.todosteps.remove('obsmarkers')
864 _pullbundle2extraprepare(pullop, kwargs)
891 _pullbundle2extraprepare(pullop, kwargs)
865 if kwargs.keys() == ['format']:
892 if kwargs.keys() == ['format']:
866 return # nothing to pull
893 return # nothing to pull
867 bundle = pullop.remote.getbundle('pull', **kwargs)
894 bundle = pullop.remote.getbundle('pull', **kwargs)
868 try:
895 try:
869 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
896 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
870 except error.BundleValueError, exc:
897 except error.BundleValueError, exc:
871 raise util.Abort('missing support for %s' % exc)
898 raise util.Abort('missing support for %s' % exc)
872
899
873 if pullop.fetch:
900 if pullop.fetch:
874 assert len(op.records['changegroup']) == 1
901 assert len(op.records['changegroup']) == 1
875 pullop.cgresult = op.records['changegroup'][0]['return']
902 pullop.cgresult = op.records['changegroup'][0]['return']
876
903
877 # processing phases change
904 # processing phases change
878 for namespace, value in op.records['listkeys']:
905 for namespace, value in op.records['listkeys']:
879 if namespace == 'phases':
906 if namespace == 'phases':
880 _pullapplyphases(pullop, value)
907 _pullapplyphases(pullop, value)
881
908
882 def _pullbundle2extraprepare(pullop, kwargs):
909 def _pullbundle2extraprepare(pullop, kwargs):
883 """hook function so that extensions can extend the getbundle call"""
910 """hook function so that extensions can extend the getbundle call"""
884 pass
911 pass
885
912
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        # nothing is missing remotely: report and bail out
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    repo = pullop.repo
    remote = pullop.remote
    if pullop.heads is None and list(pullop.common) == [nullid]:
        repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the richest protocol command the remote understands
    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = remote.getbundle('pull', common=pullop.common,
                              heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(repo, cg, 'pull',
                                                 remote.url())
917
944
def _pullphase(pullop):
    """fetch the remote view of phases and apply it locally"""
    # the remote exposes its phase roots through the 'phases' pushkey
    # namespace; application is delegated to _pullapplyphases
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
922
949
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    pullop.todosteps.remove('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # the remote is phase-aware and non-publishing: trust the phase
        # roots it advertises for the pulled subset
        publicheads, _dropped = phases.analyzeremotephases(
            pullop.repo, pullop.pulledsubset, remotephases)
        draftheads = pullop.pulledsubset
    else:
        # old remote, or publishing remote: every common changeset must
        # be seen as public
        publicheads = pullop.pulledsubset
        draftheads = []
    unfi = pullop.repo.unfiltered()
    getphase = unfi._phasecache.phase
    torev = unfi.changelog.nodemap.get

    def advance(targetphase, nodes):
        # exclude changesets already at (or below) the target phase
        # locally and advance the boundary for the others
        nodes = [n for n in nodes if getphase(unfi, torev(n)) > targetphase]
        if nodes:
            tr = pullop.gettransaction()
            phases.advanceboundary(pullop.repo, tr, targetphase, nodes)

    advance(phases.public, publicheads)
    advance(phases.draft, draftheads)
955
982
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The pull transaction is only opened (through ``pullop.gettransaction``)
    when markers are actually received; it is returned so the calling code
    knows whether a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if not obsolete._enabled:
        return tr
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    if 'dump0' in remoteobs:
        tr = pullop.gettransaction()
        for key in sorted(remoteobs, reverse=True):
            if key.startswith('dump'):
                # each 'dumpN' key carries a base85-encoded markers blob
                pullop.repo.obsstore.mergemarkers(
                    tr, base85.b85decode(remoteobs[key]))
        pullop.repo.invalidatevolatilesets()
    return tr
977
1004
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # advertise the HG2X format itself plus the repo's bundle2
    # capabilities, URL-quoted into a single 'bundle2=' token
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return set(['HG2X', 'bundle2=' + urllib.quote(capsblob)])
984
1011
# Names of the steps to perform when building a bundle2 for getbundle;
# ordering matters.
getbundle2partsorder = []

# Mapping between step name and the function implementing it.
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
992
1019
def getbundle2partsgenerator(stepname):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    its step name appended to the ordered step list. Beware that decorated
    functions will be added in order (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(func):
        # a step name must be registered at most once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        getbundle2partsorder.append(stepname)
        return func
    return register
1008
1035
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    changes when more part type will be available for bundle2.

    This is different from changegroup.getchangegroup that only returns an
    HG10 changegroup bundle. They may eventually get reunited in the future
    when we have a clearer idea of the API we want to use to query different
    data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # legacy bundle10 case: the client did not advertise HG2X support
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return changegroup.getchangegroup(repo, source, heads=heads,
                                          common=common,
                                          bundlecaps=bundlecaps)

    # bundle20 case: decode the bundle2 capabilities sent by the client
    b2caps = {}
    for cap in bundlecaps:
        if cap.startswith('bundle2='):
            blob = urllib.unquote(cap[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    # run every registered part generator, in order; each one adds zero or
    # more parts to the bundler
    kwargs['heads'] = heads
    kwargs['common'] = common
    for name in getbundle2partsorder:
        partgen = getbundle2partsmapping[name]
        partgen(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
                **kwargs)

    return util.chunkbuffer(bundler.getchunks())
1051
1078
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    # the client may explicitly decline the changegroup with cg=False
    if not kwargs.get('cg', True):
        return
    cg = changegroup.getchangegroup(repo, source, heads=heads,
                                    common=common, bundlecaps=bundlecaps)
    # getchangegroup may return None when there is nothing to send
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
1064
1091
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one 'b2x:listkeys' part per requested pushkey namespace
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1075
1102
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only ship markers relevant to the ancestors of the requested heads
    subset = [c.node() for c in repo.set('::%ln', heads)]
    buildobsmarkerspart(bundler, repo.obsstore.relevantmarkers(subset))
1086
1113
@getbundle2partsgenerator('extra')
def _getbundleextrapart(bundler, repo, source, bundlecaps=None,
                        b2caps=None, **kwargs):
    """hook function to let extensions add parts to the requested bundle

    The core implementation adds nothing; extensions wrap this entry in
    getbundle2partsmapping to append their own parts.
    """
    pass
1092
1119
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    # the client may send the literal 'force', the raw head list, or a
    # hash of the head list it observed
    if (their_heads == ['force'] or their_heads == heads
            or their_heads == ['hashed', heads_hash]):
        return
    # someone else committed/pushed/unbundled while we were transferring
    # data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
1106
1133
1107 def unbundle(repo, cg, heads, source, url):
1134 def unbundle(repo, cg, heads, source, url):
1108 """Apply a bundle to a repo.
1135 """Apply a bundle to a repo.
1109
1136
1110 this function makes sure the repo is locked during the application and have
1137 this function makes sure the repo is locked during the application and have
1111 mechanism to check that no push race occurred between the creation of the
1138 mechanism to check that no push race occurred between the creation of the
1112 bundle and its application.
1139 bundle and its application.
1113
1140
1114 If the push was raced as PushRaced exception is raised."""
1141 If the push was raced as PushRaced exception is raised."""
1115 r = 0
1142 r = 0
1116 # need a transaction when processing a bundle2 stream
1143 # need a transaction when processing a bundle2 stream
1117 tr = None
1144 tr = None
1118 lock = repo.lock()
1145 lock = repo.lock()
1119 try:
1146 try:
1120 check_heads(repo, heads, 'uploading changes')
1147 check_heads(repo, heads, 'uploading changes')
1121 # push can proceed
1148 # push can proceed
1122 if util.safehasattr(cg, 'params'):
1149 if util.safehasattr(cg, 'params'):
1123 try:
1150 try:
1124 tr = repo.transaction('unbundle')
1151 tr = repo.transaction('unbundle')
1125 tr.hookargs['bundle2-exp'] = '1'
1152 tr.hookargs['bundle2-exp'] = '1'
1126 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1153 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1127 cl = repo.unfiltered().changelog
1154 cl = repo.unfiltered().changelog
1128 p = cl.writepending() and repo.root or ""
1155 p = cl.writepending() and repo.root or ""
1129 repo.hook('b2x-pretransactionclose', throw=True, source=source,
1156 repo.hook('b2x-pretransactionclose', throw=True, source=source,
1130 url=url, pending=p, **tr.hookargs)
1157 url=url, pending=p, **tr.hookargs)
1131 tr.close()
1158 tr.close()
1132 repo.hook('b2x-transactionclose', source=source, url=url,
1159 repo.hook('b2x-transactionclose', source=source, url=url,
1133 **tr.hookargs)
1160 **tr.hookargs)
1134 except Exception, exc:
1161 except Exception, exc:
1135 exc.duringunbundle2 = True
1162 exc.duringunbundle2 = True
1136 raise
1163 raise
1137 else:
1164 else:
1138 r = changegroup.addchangegroup(repo, cg, source, url)
1165 r = changegroup.addchangegroup(repo, cg, source, url)
1139 finally:
1166 finally:
1140 if tr is not None:
1167 if tr is not None:
1141 tr.release()
1168 tr.release()
1142 lock.release()
1169 lock.release()
1143 return r
1170 return r
General Comments 0
You need to be logged in to leave comments. Login now