##// END OF EJS Templates
bundle2-push: move changegroup push validation inside _pushb2ctx...
Pierre-Yves David -
r21903:48f61cfb default
parent child Browse files
Show More
@@ -1,811 +1,821
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Read a bundle header from ``fh`` and return the matching unbundler.

    ``fname`` is used only for error messages; when empty it defaults to
    "stream" and a headerless stream (leading NUL) is fixed up as an
    uncompressed HG10 bundle.  When ``vfs`` is given, ``fname`` is joined
    to the vfs root for reporting.  Raises util.Abort on a non-Mercurial
    magic or an unknown bundle version.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A raw stream may lack the 'HG' header; treat a leading NUL as a
        # headerless uncompressed changegroup and re-wrap it.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # compression algorithm is the two bytes after the header
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    elif version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    else:
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new instance should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None
82
82
def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push needs a lock on the remote side
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                          False)
                and pushop.remote.capable('bundle2-exp')):
                _pushbundle2(pushop)
            # _pushchangeset is a no-op if bundle2 already pushed changesets
            _pushchangeset(pushop)
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    _pushbookmark(pushop)
    return pushop.ret
155
151
def _pushdiscovery(pushop):
    """Run common/outgoing discovery against the remote.

    Fills in ``pushop.outgoing`` (discovery.outgoing object),
    ``pushop.remoteheads`` (remote heads before the push) and
    ``pushop.incoming`` (truthy when nodes are missing locally).
    """
    # discovery
    unfi = pushop.repo.unfiltered()
    fci = discovery.findcommonincoming
    commoninc = fci(unfi, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
168
164
def _pushcheckoutgoing(pushop):
    """Validate that the outgoing changesets may be pushed.

    Returns False when there is nothing to push; otherwise aborts on
    obsolete/troubled heads (unless forced), runs the head checks and
    returns True.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
207
203
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.ret`` attribute.
    Returns a reply handler (or None when the step is skipped).
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # NOTE: the source rendering showed a second, duplicated
    # ``pushop.stepsdone.add('changesets')`` here; it is redundant (adding
    # to a set is idempotent) and has been dropped.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    # Send known heads to the server for race detection.
    if not pushop.force:
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    def handlereply(op):
        """extract addchangroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.ret = cgreplies['changegroup'][0]['return']
    return handlereply
227
229
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # create reply capability
    capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
    bundler.newpart('b2x:replycaps', data=capsblob)
    extrainfo = _pushbundle2extraparts(pushop, bundler)
    # add the changegroup bundle
    cgreplyhandler = _pushb2ctx(pushop, bundler)
    # do not push if no other parts than the capability
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except error.BundleValueError as exc:
        raise util.Abort('missing support for %s' % exc)
    try:
        op = bundle2.processbundle(pushop.repo, reply)
    except error.BundleValueError as exc:
        raise util.Abort('missing support for %s' % exc)
    cgreplyhandler(op)
    _pushbundle2extrareply(pushop, op, extrainfo)
251
256
252 def _pushbundle2extraparts(pushop, bundler):
257 def _pushbundle2extraparts(pushop, bundler):
253 """hook function to let extensions add parts
258 """hook function to let extensions add parts
254
259
255 Return a dict to let extensions pass data to the reply processing.
260 Return a dict to let extensions pass data to the reply processing.
256 """
261 """
257 return {}
262 return {}
258
263
259 def _pushbundle2extrareply(pushop, op, extrainfo):
264 def _pushbundle2extrareply(pushop, op, extrainfo):
260 """hook function to let extensions react to part replies
265 """hook function to let extensions react to part replies
261
266
262 The dict from _pushbundle2extrareply is fed to this function.
267 The dict from _pushbundle2extrareply is fed to this function.
263 """
268 """
264 pass
269 pass
265
270
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        # changesets were already pushed (e.g. through bundle2)
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
308
318
def _pushcomputecommonheads(pushop):
    """Compute ``pushop.commonheads``: heads common with the remote after
    the changeset push (successful or not)."""
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeed, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All out push fails. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
342
352
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote by public here.
        # XXX Beware that revset break if droots is not strictly
        # XXX root we may want to ensure it is but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)

        b2caps = bundle2.bundle2caps(pushop.remote)
        if 'b2x:pushkey' in b2caps:
            # server supports bundle2, let's do a batched push through it
            #
            # This will eventually be unified with the changesets bundle2 push
            bundler = bundle2.bundle20(pushop.ui, b2caps)
            capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
            bundler.newpart('b2x:replycaps', data=capsblob)
            part2node = []
            enc = pushkey.encode
            for newremotehead in outdated:
                part = bundler.newpart('b2x:pushkey')
                part.addparam('namespace', enc('phases'))
                part.addparam('key', enc(newremotehead.hex()))
                part.addparam('old', enc(str(phases.draft)))
                part.addparam('new', enc(str(phases.public)))
                part2node.append((part.id, newremotehead))
            stream = util.chunkbuffer(bundler.getchunks())
            try:
                reply = pushop.remote.unbundle(stream, ['force'], 'push')
                op = bundle2.processbundle(pushop.repo, reply)
            except error.BundleValueError as exc:
                raise util.Abort('missing support for %s' % exc)
            for partid, node in part2node:
                partrep = op.records.getreplies(partid)
                results = partrep['pushkey']
                assert len(results) <= 1
                msg = None
                if not results:
                    msg = _('server ignored update of %s to public!\n') % node
                elif not int(results[0]['return']):
                    msg = _('updating %s to public failed!\n') % node
                if msg is not None:
                    pushop.ui.warn(msg)
        else:
            # fallback to independant pushkey command
            for newremotehead in outdated:
                r = pushop.remote.pushkey('phases',
                                          newremotehead.hex(),
                                          str(phases.draft),
                                          str(phases.public))
                if not r:
                    pushop.ui.warn(_('updating %s to public failed!\n')
                                   % newremotehead)
429
439
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
443
453
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        # local markers encoded as pushkey data
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
460
470
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    # revisions reachable from the pushed heads (empty -> no restriction)
    ancestors = list(repo.changelog.ancestors(revnums, inclusive=True))
    comp = bookmarks.compare(repo, repo._bookmarks,
                             remote.listkeys('bookmarks'), srchex=hex)
    # only bookmarks that advanced on the source side are pushed
    advsrc = comp[2]
    for name, srcid, dstid in advsrc:
        if ancestors and repo[srcid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', name, dstid, srcid):
            ui.status(_("updating bookmark %s\n") % name)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % name)
480
490
class pulloperation(object):
    """A object that represent a single pull operation

    Its purpose is to carry pull related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # set of steps remaining todo (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is not None:
            # We pulled a specific subset: sync on that subset.
            return self.heads
        # We pulled everything possible: sync on everything common.
        known = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in known)
        return subset

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
547
557
def pull(repo, remote, heads=None, force=False):
    """pull changesets (and related data) from remote into repo

    Returns the changegroup result code (see changegroup.addchangegroup).
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # refuse to pull from a local repo we cannot fully read
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        useb2 = (pullop.repo.ui.configbool('experimental', 'bundle2-exp',
                                           False)
                 and pullop.remote.capable('bundle2-exp'))
        if useb2:
            _pullbundle2(pullop)
        # run whichever legacy steps bundle2 did not already handle
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()
    return pullop.cgresult
576
586
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will grow to handle all
    discovery at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote, heads=pullop.heads,
        force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
587
597
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # changegroup is always handled by this function when using bundle2
    pullop.todosteps.remove('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    if 'b2x:listkeys' in remotecaps:
        kwargs['listkeys'] = ['phase']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    elif pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    _pullbundle2extraprepare(pullop, kwargs)
    if kwargs.keys() == ['format']:
        return # nothing to pull
    stream = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, stream, pullop.gettransaction)
    except error.BundleValueError as exc:
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        cgrecords = op.records['changegroup']
        assert len(cgrecords) == 1
        pullop.cgresult = cgrecords[0]['return']

    # apply any phase data the server sent along
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)
624
634
def _pullbundle2extraprepare(pullop, kwargs):
    """hook point letting extensions alter the arguments of the getbundle call

    Extensions override this to add extra entries to `kwargs`."""
    pass
628
638
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # Delay opening the transaction as long as possible: an empty pull
    # should not create one, and opening one early would break useful
    # rollback calls.
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    repo = pullop.repo
    remote = pullop.remote
    if pullop.heads is None and list(pullop.common) == [nullid]:
        repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = remote.getbundle('pull', common=pullop.common,
                              heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(repo, cg, 'pull',
                                                 remote.url())
660
670
def _pullphase(pullop):
    """fetch phase data from the remote and apply it locally"""
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)
665
675
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    pullop.todosteps.remove('phases')
    repo = pullop.repo
    subset = pullop.pulledsubset
    publishing = bool(remotephases.get('publishing', False))
    if publishing or not remotephases:
        # Remote is old or publishing: all common changesets
        # should be seen as public.
        phases.advanceboundary(repo, phases.public, subset)
    else:
        # Remote is new and non-publishing: honor the phase roots it
        # advertised for the pulled subset.
        pheads, _dr = phases.analyzeremotephases(repo, subset, remotephases)
        phases.advanceboundary(repo, phases.public, pheads)
        phases.advanceboundary(repo, phases.draft, subset)
683
693
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    A transaction is created (through pullop.gettransaction) only when
    markers are actually received; the transaction (or None) is returned so
    the calling code knows whether one was created.

    Exists mostly to allow overriding for experimentation purposes."""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if not obsolete._enabled:
        return tr
    repo = pullop.repo
    repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    if 'dump0' in remoteobs:
        tr = pullop.gettransaction()
        for key in sorted(remoteobs, reverse=True):
            if key.startswith('dump'):
                markers = base85.b85decode(remoteobs[key])
                repo.obsstore.mergemarkers(tr, markers)
        repo.invalidatevolatilesets()
    return tr
705
715
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(repo.bundle2caps)
    return set(['HG2X', 'bundle2=' + urllib.quote(capsblob)])
712
722
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kinds of parts)

    Could be a bundle HG10 or a bundle HG2X depending on the bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types become available for bundle2.

    This is different from changegroup.getbundle which only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we want to query different data with.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build changegroup bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # plain HG10 bundle requested: no extra arguments are supported
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    b2caps = {}
    prefix = 'bundle2='
    for bcaps in bundlecaps:
        if bcaps.startswith(prefix):
            blob = urllib.unquote(bcaps[len(prefix):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
755
765
def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
                        bundlecaps=None, **kwargs):
    """hook point letting extensions add parts to the requested bundle

    Extensions override this to append extra parts to `bundler`."""
    pass
760
770
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if (their_heads == ['force'] or their_heads == heads
            or their_heads == ['hashed', heads_hash]):
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
774
784
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        # abort with PushRaced if remote heads changed since the bundle
        # was built ('force'/'hashed' forms bypass the exact-match check)
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # a 'params' attribute marks a bundle2 stream; process it
            # inside its own transaction with the b2x hooks around it
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                cl = repo.unfiltered().changelog
                # expose pending changes to the pre-close hook when any
                # were written
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # flag the error so callers know it happened while
                # processing a bundle2 payload
                exc.duringunbundle2 = True
                raise
        else:
            # plain changegroup (HG10) stream
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now