##// END OF EJS Templates
bundle2: add an 'idx' argument to the 'b2partsgenerator'...
Pierre-Yves David -
r24731:88a36ede default
parent child Browse files
Show More
@@ -1,1294 +1,1297 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13
13
14 def readbundle(ui, fh, fname, vfs=None):
14 def readbundle(ui, fh, fname, vfs=None):
15 header = changegroup.readexactly(fh, 4)
15 header = changegroup.readexactly(fh, 4)
16
16
17 alg = None
17 alg = None
18 if not fname:
18 if not fname:
19 fname = "stream"
19 fname = "stream"
20 if not header.startswith('HG') and header.startswith('\0'):
20 if not header.startswith('HG') and header.startswith('\0'):
21 fh = changegroup.headerlessfixup(fh, header)
21 fh = changegroup.headerlessfixup(fh, header)
22 header = "HG10"
22 header = "HG10"
23 alg = 'UN'
23 alg = 'UN'
24 elif vfs:
24 elif vfs:
25 fname = vfs.join(fname)
25 fname = vfs.join(fname)
26
26
27 magic, version = header[0:2], header[2:4]
27 magic, version = header[0:2], header[2:4]
28
28
29 if magic != 'HG':
29 if magic != 'HG':
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 if version == '10':
31 if version == '10':
32 if alg is None:
32 if alg is None:
33 alg = changegroup.readexactly(fh, 2)
33 alg = changegroup.readexactly(fh, 2)
34 return changegroup.cg1unpacker(fh, alg)
34 return changegroup.cg1unpacker(fh, alg)
35 elif version.startswith('2'):
35 elif version.startswith('2'):
36 return bundle2.getunbundler(ui, fh, header=magic + version)
36 return bundle2.getunbundler(ui, fh, header=magic + version)
37 else:
37 else:
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40 def buildobsmarkerspart(bundler, markers):
40 def buildobsmarkerspart(bundler, markers):
41 """add an obsmarker part to the bundler with <markers>
41 """add an obsmarker part to the bundler with <markers>
42
42
43 No part is created if markers is empty.
43 No part is created if markers is empty.
44 Raises ValueError if the bundler doesn't support any known obsmarker format.
44 Raises ValueError if the bundler doesn't support any known obsmarker format.
45 """
45 """
46 if markers:
46 if markers:
47 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
47 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
48 version = obsolete.commonversion(remoteversions)
48 version = obsolete.commonversion(remoteversions)
49 if version is None:
49 if version is None:
50 raise ValueError('bundler do not support common obsmarker format')
50 raise ValueError('bundler do not support common obsmarker format')
51 stream = obsolete.encodemarkers(markers, True, version=version)
51 stream = obsolete.encodemarkers(markers, True, version=version)
52 return bundler.newpart('obsmarkers', data=stream)
52 return bundler.newpart('obsmarkers', data=stream)
53 return None
53 return None
54
54
55 def _canusebundle2(op):
55 def _canusebundle2(op):
56 """return true if a pull/push can use bundle2
56 """return true if a pull/push can use bundle2
57
57
58 Feel free to nuke this function when we drop the experimental option"""
58 Feel free to nuke this function when we drop the experimental option"""
59 return (op.repo.ui.configbool('experimental', 'bundle2-exp', False)
59 return (op.repo.ui.configbool('experimental', 'bundle2-exp', False)
60 and op.remote.capable('bundle2'))
60 and op.remote.capable('bundle2'))
61
61
62
62
63 class pushoperation(object):
63 class pushoperation(object):
64 """A object that represent a single push operation
64 """A object that represent a single push operation
65
65
66 It purpose is to carry push related state and very common operation.
66 It purpose is to carry push related state and very common operation.
67
67
68 A new should be created at the beginning of each push and discarded
68 A new should be created at the beginning of each push and discarded
69 afterward.
69 afterward.
70 """
70 """
71
71
72 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
72 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
73 bookmarks=()):
73 bookmarks=()):
74 # repo we push from
74 # repo we push from
75 self.repo = repo
75 self.repo = repo
76 self.ui = repo.ui
76 self.ui = repo.ui
77 # repo we push to
77 # repo we push to
78 self.remote = remote
78 self.remote = remote
79 # force option provided
79 # force option provided
80 self.force = force
80 self.force = force
81 # revs to be pushed (None is "all")
81 # revs to be pushed (None is "all")
82 self.revs = revs
82 self.revs = revs
83 # bookmark explicitly pushed
83 # bookmark explicitly pushed
84 self.bookmarks = bookmarks
84 self.bookmarks = bookmarks
85 # allow push of new branch
85 # allow push of new branch
86 self.newbranch = newbranch
86 self.newbranch = newbranch
87 # did a local lock get acquired?
87 # did a local lock get acquired?
88 self.locallocked = None
88 self.locallocked = None
89 # step already performed
89 # step already performed
90 # (used to check what steps have been already performed through bundle2)
90 # (used to check what steps have been already performed through bundle2)
91 self.stepsdone = set()
91 self.stepsdone = set()
92 # Integer version of the changegroup push result
92 # Integer version of the changegroup push result
93 # - None means nothing to push
93 # - None means nothing to push
94 # - 0 means HTTP error
94 # - 0 means HTTP error
95 # - 1 means we pushed and remote head count is unchanged *or*
95 # - 1 means we pushed and remote head count is unchanged *or*
96 # we have outgoing changesets but refused to push
96 # we have outgoing changesets but refused to push
97 # - other values as described by addchangegroup()
97 # - other values as described by addchangegroup()
98 self.cgresult = None
98 self.cgresult = None
99 # Boolean value for the bookmark push
99 # Boolean value for the bookmark push
100 self.bkresult = None
100 self.bkresult = None
101 # discover.outgoing object (contains common and outgoing data)
101 # discover.outgoing object (contains common and outgoing data)
102 self.outgoing = None
102 self.outgoing = None
103 # all remote heads before the push
103 # all remote heads before the push
104 self.remoteheads = None
104 self.remoteheads = None
105 # testable as a boolean indicating if any nodes are missing locally.
105 # testable as a boolean indicating if any nodes are missing locally.
106 self.incoming = None
106 self.incoming = None
107 # phases changes that must be pushed along side the changesets
107 # phases changes that must be pushed along side the changesets
108 self.outdatedphases = None
108 self.outdatedphases = None
109 # phases changes that must be pushed if changeset push fails
109 # phases changes that must be pushed if changeset push fails
110 self.fallbackoutdatedphases = None
110 self.fallbackoutdatedphases = None
111 # outgoing obsmarkers
111 # outgoing obsmarkers
112 self.outobsmarkers = set()
112 self.outobsmarkers = set()
113 # outgoing bookmarks
113 # outgoing bookmarks
114 self.outbookmarks = []
114 self.outbookmarks = []
115 # transaction manager
115 # transaction manager
116 self.trmanager = None
116 self.trmanager = None
117
117
118 @util.propertycache
118 @util.propertycache
119 def futureheads(self):
119 def futureheads(self):
120 """future remote heads if the changeset push succeeds"""
120 """future remote heads if the changeset push succeeds"""
121 return self.outgoing.missingheads
121 return self.outgoing.missingheads
122
122
123 @util.propertycache
123 @util.propertycache
124 def fallbackheads(self):
124 def fallbackheads(self):
125 """future remote heads if the changeset push fails"""
125 """future remote heads if the changeset push fails"""
126 if self.revs is None:
126 if self.revs is None:
127 # not target to push, all common are relevant
127 # not target to push, all common are relevant
128 return self.outgoing.commonheads
128 return self.outgoing.commonheads
129 unfi = self.repo.unfiltered()
129 unfi = self.repo.unfiltered()
130 # I want cheads = heads(::missingheads and ::commonheads)
130 # I want cheads = heads(::missingheads and ::commonheads)
131 # (missingheads is revs with secret changeset filtered out)
131 # (missingheads is revs with secret changeset filtered out)
132 #
132 #
133 # This can be expressed as:
133 # This can be expressed as:
134 # cheads = ( (missingheads and ::commonheads)
134 # cheads = ( (missingheads and ::commonheads)
135 # + (commonheads and ::missingheads))"
135 # + (commonheads and ::missingheads))"
136 # )
136 # )
137 #
137 #
138 # while trying to push we already computed the following:
138 # while trying to push we already computed the following:
139 # common = (::commonheads)
139 # common = (::commonheads)
140 # missing = ((commonheads::missingheads) - commonheads)
140 # missing = ((commonheads::missingheads) - commonheads)
141 #
141 #
142 # We can pick:
142 # We can pick:
143 # * missingheads part of common (::commonheads)
143 # * missingheads part of common (::commonheads)
144 common = set(self.outgoing.common)
144 common = set(self.outgoing.common)
145 nm = self.repo.changelog.nodemap
145 nm = self.repo.changelog.nodemap
146 cheads = [node for node in self.revs if nm[node] in common]
146 cheads = [node for node in self.revs if nm[node] in common]
147 # and
147 # and
148 # * commonheads parents on missing
148 # * commonheads parents on missing
149 revset = unfi.set('%ln and parents(roots(%ln))',
149 revset = unfi.set('%ln and parents(roots(%ln))',
150 self.outgoing.commonheads,
150 self.outgoing.commonheads,
151 self.outgoing.missing)
151 self.outgoing.missing)
152 cheads.extend(c.node() for c in revset)
152 cheads.extend(c.node() for c in revset)
153 return cheads
153 return cheads
154
154
155 @property
155 @property
156 def commonheads(self):
156 def commonheads(self):
157 """set of all common heads after changeset bundle push"""
157 """set of all common heads after changeset bundle push"""
158 if self.cgresult:
158 if self.cgresult:
159 return self.futureheads
159 return self.futureheads
160 else:
160 else:
161 return self.fallbackheads
161 return self.fallbackheads
162
162
163 # mapping of message used when pushing bookmark
163 # mapping of message used when pushing bookmark
164 bookmsgmap = {'update': (_("updating bookmark %s\n"),
164 bookmsgmap = {'update': (_("updating bookmark %s\n"),
165 _('updating bookmark %s failed!\n')),
165 _('updating bookmark %s failed!\n')),
166 'export': (_("exporting bookmark %s\n"),
166 'export': (_("exporting bookmark %s\n"),
167 _('exporting bookmark %s failed!\n')),
167 _('exporting bookmark %s failed!\n')),
168 'delete': (_("deleting remote bookmark %s\n"),
168 'delete': (_("deleting remote bookmark %s\n"),
169 _('deleting remote bookmark %s failed!\n')),
169 _('deleting remote bookmark %s failed!\n')),
170 }
170 }
171
171
172
172
173 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
173 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
174 '''Push outgoing changesets (limited by revs) from a local
174 '''Push outgoing changesets (limited by revs) from a local
175 repository to remote. Return an integer:
175 repository to remote. Return an integer:
176 - None means nothing to push
176 - None means nothing to push
177 - 0 means HTTP error
177 - 0 means HTTP error
178 - 1 means we pushed and remote head count is unchanged *or*
178 - 1 means we pushed and remote head count is unchanged *or*
179 we have outgoing changesets but refused to push
179 we have outgoing changesets but refused to push
180 - other values as described by addchangegroup()
180 - other values as described by addchangegroup()
181 '''
181 '''
182 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
182 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
183 if pushop.remote.local():
183 if pushop.remote.local():
184 missing = (set(pushop.repo.requirements)
184 missing = (set(pushop.repo.requirements)
185 - pushop.remote.local().supported)
185 - pushop.remote.local().supported)
186 if missing:
186 if missing:
187 msg = _("required features are not"
187 msg = _("required features are not"
188 " supported in the destination:"
188 " supported in the destination:"
189 " %s") % (', '.join(sorted(missing)))
189 " %s") % (', '.join(sorted(missing)))
190 raise util.Abort(msg)
190 raise util.Abort(msg)
191
191
192 # there are two ways to push to remote repo:
192 # there are two ways to push to remote repo:
193 #
193 #
194 # addchangegroup assumes local user can lock remote
194 # addchangegroup assumes local user can lock remote
195 # repo (local filesystem, old ssh servers).
195 # repo (local filesystem, old ssh servers).
196 #
196 #
197 # unbundle assumes local user cannot lock remote repo (new ssh
197 # unbundle assumes local user cannot lock remote repo (new ssh
198 # servers, http servers).
198 # servers, http servers).
199
199
200 if not pushop.remote.canpush():
200 if not pushop.remote.canpush():
201 raise util.Abort(_("destination does not support push"))
201 raise util.Abort(_("destination does not support push"))
202 # get local lock as we might write phase data
202 # get local lock as we might write phase data
203 locallock = None
203 locallock = None
204 try:
204 try:
205 locallock = pushop.repo.lock()
205 locallock = pushop.repo.lock()
206 pushop.locallocked = True
206 pushop.locallocked = True
207 except IOError, err:
207 except IOError, err:
208 pushop.locallocked = False
208 pushop.locallocked = False
209 if err.errno != errno.EACCES:
209 if err.errno != errno.EACCES:
210 raise
210 raise
211 # source repo cannot be locked.
211 # source repo cannot be locked.
212 # We do not abort the push, but just disable the local phase
212 # We do not abort the push, but just disable the local phase
213 # synchronisation.
213 # synchronisation.
214 msg = 'cannot lock source repository: %s\n' % err
214 msg = 'cannot lock source repository: %s\n' % err
215 pushop.ui.debug(msg)
215 pushop.ui.debug(msg)
216 try:
216 try:
217 if pushop.locallocked:
217 if pushop.locallocked:
218 pushop.trmanager = transactionmanager(repo,
218 pushop.trmanager = transactionmanager(repo,
219 'push-response',
219 'push-response',
220 pushop.remote.url())
220 pushop.remote.url())
221 pushop.repo.checkpush(pushop)
221 pushop.repo.checkpush(pushop)
222 lock = None
222 lock = None
223 unbundle = pushop.remote.capable('unbundle')
223 unbundle = pushop.remote.capable('unbundle')
224 if not unbundle:
224 if not unbundle:
225 lock = pushop.remote.lock()
225 lock = pushop.remote.lock()
226 try:
226 try:
227 _pushdiscovery(pushop)
227 _pushdiscovery(pushop)
228 if _canusebundle2(pushop):
228 if _canusebundle2(pushop):
229 _pushbundle2(pushop)
229 _pushbundle2(pushop)
230 _pushchangeset(pushop)
230 _pushchangeset(pushop)
231 _pushsyncphase(pushop)
231 _pushsyncphase(pushop)
232 _pushobsolete(pushop)
232 _pushobsolete(pushop)
233 _pushbookmark(pushop)
233 _pushbookmark(pushop)
234 finally:
234 finally:
235 if lock is not None:
235 if lock is not None:
236 lock.release()
236 lock.release()
237 if pushop.trmanager:
237 if pushop.trmanager:
238 pushop.trmanager.close()
238 pushop.trmanager.close()
239 finally:
239 finally:
240 if pushop.trmanager:
240 if pushop.trmanager:
241 pushop.trmanager.release()
241 pushop.trmanager.release()
242 if locallock is not None:
242 if locallock is not None:
243 locallock.release()
243 locallock.release()
244
244
245 return pushop
245 return pushop
246
246
247 # list of steps to perform discovery before push
247 # list of steps to perform discovery before push
248 pushdiscoveryorder = []
248 pushdiscoveryorder = []
249
249
250 # Mapping between step name and function
250 # Mapping between step name and function
251 #
251 #
252 # This exists to help extensions wrap steps if necessary
252 # This exists to help extensions wrap steps if necessary
253 pushdiscoverymapping = {}
253 pushdiscoverymapping = {}
254
254
255 def pushdiscovery(stepname):
255 def pushdiscovery(stepname):
256 """decorator for function performing discovery before push
256 """decorator for function performing discovery before push
257
257
258 The function is added to the step -> function mapping and appended to the
258 The function is added to the step -> function mapping and appended to the
259 list of steps. Beware that decorated function will be added in order (this
259 list of steps. Beware that decorated function will be added in order (this
260 may matter).
260 may matter).
261
261
262 You can only use this decorator for a new step, if you want to wrap a step
262 You can only use this decorator for a new step, if you want to wrap a step
263 from an extension, change the pushdiscovery dictionary directly."""
263 from an extension, change the pushdiscovery dictionary directly."""
264 def dec(func):
264 def dec(func):
265 assert stepname not in pushdiscoverymapping
265 assert stepname not in pushdiscoverymapping
266 pushdiscoverymapping[stepname] = func
266 pushdiscoverymapping[stepname] = func
267 pushdiscoveryorder.append(stepname)
267 pushdiscoveryorder.append(stepname)
268 return func
268 return func
269 return dec
269 return dec
270
270
271 def _pushdiscovery(pushop):
271 def _pushdiscovery(pushop):
272 """Run all discovery steps"""
272 """Run all discovery steps"""
273 for stepname in pushdiscoveryorder:
273 for stepname in pushdiscoveryorder:
274 step = pushdiscoverymapping[stepname]
274 step = pushdiscoverymapping[stepname]
275 step(pushop)
275 step(pushop)
276
276
277 @pushdiscovery('changeset')
277 @pushdiscovery('changeset')
278 def _pushdiscoverychangeset(pushop):
278 def _pushdiscoverychangeset(pushop):
279 """discover the changeset that need to be pushed"""
279 """discover the changeset that need to be pushed"""
280 fci = discovery.findcommonincoming
280 fci = discovery.findcommonincoming
281 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
281 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
282 common, inc, remoteheads = commoninc
282 common, inc, remoteheads = commoninc
283 fco = discovery.findcommonoutgoing
283 fco = discovery.findcommonoutgoing
284 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
284 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
285 commoninc=commoninc, force=pushop.force)
285 commoninc=commoninc, force=pushop.force)
286 pushop.outgoing = outgoing
286 pushop.outgoing = outgoing
287 pushop.remoteheads = remoteheads
287 pushop.remoteheads = remoteheads
288 pushop.incoming = inc
288 pushop.incoming = inc
289
289
290 @pushdiscovery('phase')
290 @pushdiscovery('phase')
291 def _pushdiscoveryphase(pushop):
291 def _pushdiscoveryphase(pushop):
292 """discover the phase that needs to be pushed
292 """discover the phase that needs to be pushed
293
293
294 (computed for both success and failure case for changesets push)"""
294 (computed for both success and failure case for changesets push)"""
295 outgoing = pushop.outgoing
295 outgoing = pushop.outgoing
296 unfi = pushop.repo.unfiltered()
296 unfi = pushop.repo.unfiltered()
297 remotephases = pushop.remote.listkeys('phases')
297 remotephases = pushop.remote.listkeys('phases')
298 publishing = remotephases.get('publishing', False)
298 publishing = remotephases.get('publishing', False)
299 ana = phases.analyzeremotephases(pushop.repo,
299 ana = phases.analyzeremotephases(pushop.repo,
300 pushop.fallbackheads,
300 pushop.fallbackheads,
301 remotephases)
301 remotephases)
302 pheads, droots = ana
302 pheads, droots = ana
303 extracond = ''
303 extracond = ''
304 if not publishing:
304 if not publishing:
305 extracond = ' and public()'
305 extracond = ' and public()'
306 revset = 'heads((%%ln::%%ln) %s)' % extracond
306 revset = 'heads((%%ln::%%ln) %s)' % extracond
307 # Get the list of all revs draft on remote by public here.
307 # Get the list of all revs draft on remote by public here.
308 # XXX Beware that revset break if droots is not strictly
308 # XXX Beware that revset break if droots is not strictly
309 # XXX root we may want to ensure it is but it is costly
309 # XXX root we may want to ensure it is but it is costly
310 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
310 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
311 if not outgoing.missing:
311 if not outgoing.missing:
312 future = fallback
312 future = fallback
313 else:
313 else:
314 # adds changeset we are going to push as draft
314 # adds changeset we are going to push as draft
315 #
315 #
316 # should not be necessary for publishing server, but because of an
316 # should not be necessary for publishing server, but because of an
317 # issue fixed in xxxxx we have to do it anyway.
317 # issue fixed in xxxxx we have to do it anyway.
318 fdroots = list(unfi.set('roots(%ln + %ln::)',
318 fdroots = list(unfi.set('roots(%ln + %ln::)',
319 outgoing.missing, droots))
319 outgoing.missing, droots))
320 fdroots = [f.node() for f in fdroots]
320 fdroots = [f.node() for f in fdroots]
321 future = list(unfi.set(revset, fdroots, pushop.futureheads))
321 future = list(unfi.set(revset, fdroots, pushop.futureheads))
322 pushop.outdatedphases = future
322 pushop.outdatedphases = future
323 pushop.fallbackoutdatedphases = fallback
323 pushop.fallbackoutdatedphases = fallback
324
324
325 @pushdiscovery('obsmarker')
325 @pushdiscovery('obsmarker')
326 def _pushdiscoveryobsmarkers(pushop):
326 def _pushdiscoveryobsmarkers(pushop):
327 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
327 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
328 and pushop.repo.obsstore
328 and pushop.repo.obsstore
329 and 'obsolete' in pushop.remote.listkeys('namespaces')):
329 and 'obsolete' in pushop.remote.listkeys('namespaces')):
330 repo = pushop.repo
330 repo = pushop.repo
331 # very naive computation, that can be quite expensive on big repo.
331 # very naive computation, that can be quite expensive on big repo.
332 # However: evolution is currently slow on them anyway.
332 # However: evolution is currently slow on them anyway.
333 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
333 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
334 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
334 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
335
335
336 @pushdiscovery('bookmarks')
336 @pushdiscovery('bookmarks')
337 def _pushdiscoverybookmarks(pushop):
337 def _pushdiscoverybookmarks(pushop):
338 ui = pushop.ui
338 ui = pushop.ui
339 repo = pushop.repo.unfiltered()
339 repo = pushop.repo.unfiltered()
340 remote = pushop.remote
340 remote = pushop.remote
341 ui.debug("checking for updated bookmarks\n")
341 ui.debug("checking for updated bookmarks\n")
342 ancestors = ()
342 ancestors = ()
343 if pushop.revs:
343 if pushop.revs:
344 revnums = map(repo.changelog.rev, pushop.revs)
344 revnums = map(repo.changelog.rev, pushop.revs)
345 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
345 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
346 remotebookmark = remote.listkeys('bookmarks')
346 remotebookmark = remote.listkeys('bookmarks')
347
347
348 explicit = set(pushop.bookmarks)
348 explicit = set(pushop.bookmarks)
349
349
350 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
350 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
351 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
351 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
352 for b, scid, dcid in advsrc:
352 for b, scid, dcid in advsrc:
353 if b in explicit:
353 if b in explicit:
354 explicit.remove(b)
354 explicit.remove(b)
355 if not ancestors or repo[scid].rev() in ancestors:
355 if not ancestors or repo[scid].rev() in ancestors:
356 pushop.outbookmarks.append((b, dcid, scid))
356 pushop.outbookmarks.append((b, dcid, scid))
357 # search added bookmark
357 # search added bookmark
358 for b, scid, dcid in addsrc:
358 for b, scid, dcid in addsrc:
359 if b in explicit:
359 if b in explicit:
360 explicit.remove(b)
360 explicit.remove(b)
361 pushop.outbookmarks.append((b, '', scid))
361 pushop.outbookmarks.append((b, '', scid))
362 # search for overwritten bookmark
362 # search for overwritten bookmark
363 for b, scid, dcid in advdst + diverge + differ:
363 for b, scid, dcid in advdst + diverge + differ:
364 if b in explicit:
364 if b in explicit:
365 explicit.remove(b)
365 explicit.remove(b)
366 pushop.outbookmarks.append((b, dcid, scid))
366 pushop.outbookmarks.append((b, dcid, scid))
367 # search for bookmark to delete
367 # search for bookmark to delete
368 for b, scid, dcid in adddst:
368 for b, scid, dcid in adddst:
369 if b in explicit:
369 if b in explicit:
370 explicit.remove(b)
370 explicit.remove(b)
371 # treat as "deleted locally"
371 # treat as "deleted locally"
372 pushop.outbookmarks.append((b, dcid, ''))
372 pushop.outbookmarks.append((b, dcid, ''))
373 # identical bookmarks shouldn't get reported
373 # identical bookmarks shouldn't get reported
374 for b, scid, dcid in same:
374 for b, scid, dcid in same:
375 if b in explicit:
375 if b in explicit:
376 explicit.remove(b)
376 explicit.remove(b)
377
377
378 if explicit:
378 if explicit:
379 explicit = sorted(explicit)
379 explicit = sorted(explicit)
380 # we should probably list all of them
380 # we should probably list all of them
381 ui.warn(_('bookmark %s does not exist on the local '
381 ui.warn(_('bookmark %s does not exist on the local '
382 'or remote repository!\n') % explicit[0])
382 'or remote repository!\n') % explicit[0])
383 pushop.bkresult = 2
383 pushop.bkresult = 2
384
384
385 pushop.outbookmarks.sort()
385 pushop.outbookmarks.sort()
386
386
387 def _pushcheckoutgoing(pushop):
387 def _pushcheckoutgoing(pushop):
388 outgoing = pushop.outgoing
388 outgoing = pushop.outgoing
389 unfi = pushop.repo.unfiltered()
389 unfi = pushop.repo.unfiltered()
390 if not outgoing.missing:
390 if not outgoing.missing:
391 # nothing to push
391 # nothing to push
392 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
392 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
393 return False
393 return False
394 # something to push
394 # something to push
395 if not pushop.force:
395 if not pushop.force:
396 # if repo.obsstore == False --> no obsolete
396 # if repo.obsstore == False --> no obsolete
397 # then, save the iteration
397 # then, save the iteration
398 if unfi.obsstore:
398 if unfi.obsstore:
399 # this message are here for 80 char limit reason
399 # this message are here for 80 char limit reason
400 mso = _("push includes obsolete changeset: %s!")
400 mso = _("push includes obsolete changeset: %s!")
401 mst = {"unstable": _("push includes unstable changeset: %s!"),
401 mst = {"unstable": _("push includes unstable changeset: %s!"),
402 "bumped": _("push includes bumped changeset: %s!"),
402 "bumped": _("push includes bumped changeset: %s!"),
403 "divergent": _("push includes divergent changeset: %s!")}
403 "divergent": _("push includes divergent changeset: %s!")}
404 # If we are to push if there is at least one
404 # If we are to push if there is at least one
405 # obsolete or unstable changeset in missing, at
405 # obsolete or unstable changeset in missing, at
406 # least one of the missinghead will be obsolete or
406 # least one of the missinghead will be obsolete or
407 # unstable. So checking heads only is ok
407 # unstable. So checking heads only is ok
408 for node in outgoing.missingheads:
408 for node in outgoing.missingheads:
409 ctx = unfi[node]
409 ctx = unfi[node]
410 if ctx.obsolete():
410 if ctx.obsolete():
411 raise util.Abort(mso % ctx)
411 raise util.Abort(mso % ctx)
412 elif ctx.troubled():
412 elif ctx.troubled():
413 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
413 raise util.Abort(mst[ctx.troubles()[0]] % ctx)
414 newbm = pushop.ui.configlist('bookmarks', 'pushing')
414 newbm = pushop.ui.configlist('bookmarks', 'pushing')
415 discovery.checkheads(unfi, pushop.remote, outgoing,
415 discovery.checkheads(unfi, pushop.remote, outgoing,
416 pushop.remoteheads,
416 pushop.remoteheads,
417 pushop.newbranch,
417 pushop.newbranch,
418 bool(pushop.incoming),
418 bool(pushop.incoming),
419 newbm)
419 newbm)
420 return True
420 return True
421
421
422 # List of names of steps to perform for an outgoing bundle2, order matters.
422 # List of names of steps to perform for an outgoing bundle2, order matters.
423 b2partsgenorder = []
423 b2partsgenorder = []
424
424
425 # Mapping between step name and function
425 # Mapping between step name and function
426 #
426 #
427 # This exists to help extensions wrap steps if necessary
427 # This exists to help extensions wrap steps if necessary
428 b2partsgenmapping = {}
428 b2partsgenmapping = {}
429
429
430 def b2partsgenerator(stepname):
430 def b2partsgenerator(stepname, idx=None):
431 """decorator for function generating bundle2 part
431 """decorator for function generating bundle2 part
432
432
433 The function is added to the step -> function mapping and appended to the
433 The function is added to the step -> function mapping and appended to the
434 list of steps. Beware that decorated functions will be added in order
434 list of steps. Beware that decorated functions will be added in order
435 (this may matter).
435 (this may matter).
436
436
437 You can only use this decorator for new steps, if you want to wrap a step
437 You can only use this decorator for new steps, if you want to wrap a step
438 from an extension, attack the b2partsgenmapping dictionary directly."""
438 from an extension, attack the b2partsgenmapping dictionary directly."""
439 def dec(func):
439 def dec(func):
440 assert stepname not in b2partsgenmapping
440 assert stepname not in b2partsgenmapping
441 b2partsgenmapping[stepname] = func
441 b2partsgenmapping[stepname] = func
442 b2partsgenorder.append(stepname)
442 if idx is None:
443 b2partsgenorder.append(stepname)
444 else:
445 b2partsgenorder.insert(idx, stepname)
443 return func
446 return func
444 return dec
447 return dec
445
448
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    if not pushop.force:
        # ask the server to fail if its heads changed since discovery
        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = None
    cgversions = b2caps.get('changegroup')
    if not cgversions: # 3.1 and 3.2 ship with an empty value
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing)
    else:
        # negotiate the highest changegroup version known to both sides
        cgversions = [v for v in cgversions if v in changegroup.packermap]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                pushop.outgoing,
                                                version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if version is not None:
        cgpart.addparam('version', version)
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
486
489
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    One 'pushkey' part is added per outdated remote head (draft -> public).
    The returned handler inspects the server replies and warns about any
    head whose phase update was ignored or failed.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # use the same membership idiom as _pushb2bookmarks
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        # remember which part maps to which node for the reply phase
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        """check pushkey replies and warn on ignored/failed updates"""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
518
521
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the bundle2 push (when possible)"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    # the exchange only works when both ends share a marker wire version
    wireversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(wireversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, pushop.outobsmarkers)
529
532
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    # one 'pushkey' part per outgoing bookmark change
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # classify the change so the reply handler picks the right message
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))


    def handlereply(op):
        """report success/failure for each bookmark pushkey part"""
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
572
575
573
576
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback (server-sent bundle in the reply) needs a transaction manager
    # and the experimental config knob
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # each registered generator may add parts; a callable return value is a
    # handler for the matching section of the server reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (the 'replycaps' part is always present)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    try:
        trgetter = None
        if pushback:
            trgetter = pushop.trmanager.transaction
        op = bundle2.processbundle(pushop.repo, reply, trgetter)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    # let each part generator process its section of the reply
    for rephand in replyhandlers:
        rephand(op)
610
613
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
659
662
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
715
718
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        # a transaction (hence a lock) is held: the move is safe
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # Repo is not locked, so we must not change any phases.  Just tell the
    # user whenever a move would actually have happened.
    wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
    phasestr = phases.phasenames[phase]
    if wouldmove:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
732
735
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    payload = obsolete._pushkeyescape(pushop.outobsmarkers)
    # reverse sort to ensure we end with dump0
    outcomes = [remote.pushkey('obsolete', key, '', payload[key])
                for key in sorted(payload, reverse=True)]
    if not all(outcomes):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
751
754
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for book, old, new in pushop.outbookmarks:
        # classify the change so we emit the matching status/error message
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        ok = remote.pushkey('bookmarks', book, old, new)
        if ok:
            ui.status(bookmsgmap[action][0] % book)
        else:
            ui.warn(bookmsgmap[action][1] % book)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
773
776
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new pulloperation should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=()):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
830
833
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily-created transaction (see transaction())
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            # expose origin information to hooks
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
860
863
def pull(repo, remote, heads=None, force=False, bookmarks=()):
    """pull changesets (and related data) from ``remote`` into ``repo``

    Runs discovery, then the changegroup/phase/bookmark/obsmarker pull
    steps inside a single lock + transaction.  Returns the
    ``pulloperation`` object used for the pull.
    """
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks)
    if pullop.remote.local():
        # local-to-local pull: make sure we support the source's requirements
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    pullop.remotebookmarks = remote.listkeys('bookmarks')
    lock = pullop.repo.lock()
    try:
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        _pulldiscovery(pullop)
        if _canusebundle2(pullop):
            _pullbundle2(pullop)
        # each step checks pullop.stepsdone, so already-done work is skipped
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        pullop.trmanager.release()
        lock.release()

    return pullop
888
891
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname, idx=None):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps.  Beware that decorated functions will be added in order
    (this may matter).

    When the 'idx' argument is given, the step is inserted at that position
    in the order list instead of being appended (mirrors the behaviour of
    the ``b2partsgenerator`` decorator).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def dec(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        if idx is None:
            pulldiscoveryorder.append(stepname)
        else:
            pulldiscoveryorder.insert(idx, stepname)
        return func
    return dec
912
915
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
918
921
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, let's drop it from the
        # unknown remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situation.  We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not including a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally: common even though discovery missed it
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
956
959
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    # advertise our own bundle2 support to the remote via bundlecaps
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # pulling changegroup: mark the step done up front, the whole changegroup
    # transfer happens inside this single bundle2 exchange
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in remotecaps:
        # piggy-back phase and bookmark data on the same round trip
        kwargs['listkeys'] = ['phase', 'bookmarks']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        # request obsolescence markers only when both sides agree on a
        # marker format version
        remoteversions = bundle2.obsmarkersversion(remotecaps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    # let extensions add extra arguments before issuing the request
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        # fold the per-part changegroup results into a single return code
        results = [cg['return'] for cg in op.records['changegroup']]
        pullop.cgresult = changegroup.combineresults(results)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value
            _pullbookmarks(pullop)
1003
1006
1004 def _pullbundle2extraprepare(pullop, kwargs):
1007 def _pullbundle2extraprepare(pullop, kwargs):
1005 """hook function so that extensions can extend the getbundle call"""
1008 """hook function so that extensions can extend the getbundle call"""
1006 pass
1009 pass
1007
1010
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the opening of the transaction as late as possible so we
    # don't open a transaction for nothing, and so we don't break a future
    # useful rollback call.
    if 'changegroup' in pullop.stepsdone:
        # already transferred (e.g. by the bundle2 path)
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the best transfer command the remote supports, from newest
    # (getbundle) down to the legacy changegroup protocols
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
1041
1044
def _pullphase(pullop):
    """Fetch phase data from the remote and apply it locally."""
    # skip entirely when an earlier step (e.g. bundle2) already handled it
    if 'phases' not in pullop.stepsdone:
        _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1048
1051
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    `remotephases` is the raw listkeys dict returned by the remote for the
    'phases' namespace.
    """
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing: analyze the advertised roots to
        # split the pulled subset into public heads and draft heads
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing: all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # bind frequently-used lookups to locals (unfiltered view so hidden
    # changesets are still considered)
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1083
1086
def _pullbookmarks(pullop):
    """Update the local bookmarks from the remote bookmark information."""
    # nothing to do when an earlier step already processed bookmarks
    if 'bookmarks' not in pullop.stepsdone:
        pullop.stepsdone.add('bookmarks')
        repo = pullop.repo
        bookmod.updatefromremote(repo.ui, repo, pullop.remotebookmarks,
                                 pullop.remote.url(),
                                 pullop.gettransaction,
                                 explicit=pullop.explicitbookmarks)
1095
1098
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    `pullop.gettransaction` is a function that returns the pull transaction,
    creating one if necessary. We return the transaction to inform the
    calling code that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes."""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # 'dump0' is always present when the remote has markers to exchange
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            # markers are split across 'dumpN' keys; merge them all
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            # new markers may change changeset visibility
            pullop.repo.invalidatevolatilesets()
    return tr
1119
1122
def caps20to10(repo):
    """Build the bundlecaps set advertising bundle20 support for getbundle."""
    # encode our bundle2 capabilities so the remote can mirror them back
    blob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return set(['HG20', 'bundle2=' + urllib.quote(blob)])
1126
1129
# List of names of steps to perform for a bundle2 for getbundle, order
# matters: parts are generated in this order.
getbundle2partsorder = []

# Mapping between step name and the function generating that part.
#
# This exists to help extensions wrap steps if necessary.
getbundle2partsmapping = {}
1134
1137
def getbundle2partsgenerator(stepname):
    """Register a bundle2 part generator for getbundle under *stepname*.

    The decorated function is recorded in the step -> function mapping and
    the step name is appended to the ordered step list; registration order
    is the order parts will be generated in, so it may matter.

    Only use this decorator for brand new steps. To wrap a step from an
    extension, attack the getbundle2partsmapping dictionary directly.
    """
    def register(func):
        # each step name may be registered only once
        assert stepname not in getbundle2partsmapping
        getbundle2partsorder.append(stepname)
        getbundle2partsmapping[stepname] = func
        return func
    return register
1150
1153
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kinds of parts)

    Could be a bundle HG10 or a bundle HG20 depending on the bundlecaps
    passed. For now, the bundle can contain only a changegroup, but this
    will change when more part types become available for bundle2.

    This is different from changegroup.getchangegroup, which only returns
    an HG10 changegroup bundle. They may eventually get reunited in the
    future when we have a clearer idea of the API we want to query
    different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # bundle10 case: chosen when the client does not advertise HG2x support
    usebundle2 = False
    if bundlecaps is not None:
        usebundle2 = util.any((cap.startswith('HG2') for cap in bundlecaps))
    if not usebundle2:
        # bundle10 can only carry a changegroup, nothing else
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        # any leftover kwargs would be silently ignored; refuse them instead
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return changegroup.getchangegroup(repo, source, heads=heads,
                                          common=common, bundlecaps=bundlecaps)

    # bundle20 case: decode the client's bundle2 capabilities from the
    # 'bundle2=' entry of bundlecaps
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    # pass heads/common along to every part generator through kwargs
    kwargs['heads'] = heads
    kwargs['common'] = common

    # run every registered part generator, in registration order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **kwargs)

    return util.chunkbuffer(bundler.getchunks())
1197
1200
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = None
        cgversions = b2caps.get('changegroup')
        if not cgversions: # 3.1 and 3.2 ship with an empty value
            # no version advertised: fall back to the default (01) format
            cg = changegroup.getchangegroupraw(repo, source, heads=heads,
                                               common=common,
                                               bundlecaps=bundlecaps)
        else:
            # keep only versions both sides know, and use the newest one
            cgversions = [v for v in cgversions if v in changegroup.packermap]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
            cg = changegroup.getchangegroupraw(repo, source, heads=heads,
                                               common=common,
                                               bundlecaps=bundlecaps,
                                               version=version)

    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if version is not None:
            # tell the receiving side which changegroup format was used
            part.addparam('version', version)
1225
1228
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """Emit one 'listkeys' part per requested pushkey namespace."""
    for ns in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', ns)
        part.data = pushkey.encodekeys(repo.listkeys(ns).items())
1236
1239
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """Add an obsolescence-markers part when the request asks for one."""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only ship markers relevant to the changesets being transferred
    nodes = [ctx.node() for ctx in repo.set('::%ln', heads)]
    buildobsmarkerspart(bundler, repo.obsstore.relevantmarkers(nodes))
1247
1250
def check_heads(repo, their_heads, context):
    """Raise PushRaced if the repository heads no longer match *their_heads*.

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if their_heads == ['force']:
        # caller explicitly skipped the race check
        return
    if their_heads == heads or their_heads == ['hashed', heads_hash]:
        return
    # someone else committed/pushed/unbundled while we were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
1261
1264
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        # abort early (PushRaced) if the repo changed since the client's
        # discovery, unless the client forced the push
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # presence of 'params' means a bundle2 stream
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['source'] = source
                tr.hookargs['url'] = url
                tr.hookargs['bundle2'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                tr.close()
            except Exception, exc:
                # tag the exception so callers know it happened during a
                # bundle2 application (affects error reporting to the client)
                exc.duringunbundle2 = True
                raise
        else:
            # legacy bundle10 changegroup
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        # tr.release() is a no-op after a successful tr.close(); otherwise
        # it rolls the transaction back before the lock is dropped
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now