##// END OF EJS Templates
bundle2-push: introduce a list of part generating functions...
Pierre-Yves David -
r21904:5fbccbcc default
parent child Browse files
Show More
@@ -1,821 +1,827
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
14 def readbundle(ui, fh, fname, vfs=None):
14 def readbundle(ui, fh, fname, vfs=None):
15 header = changegroup.readexactly(fh, 4)
15 header = changegroup.readexactly(fh, 4)
16
16
17 alg = None
17 alg = None
18 if not fname:
18 if not fname:
19 fname = "stream"
19 fname = "stream"
20 if not header.startswith('HG') and header.startswith('\0'):
20 if not header.startswith('HG') and header.startswith('\0'):
21 fh = changegroup.headerlessfixup(fh, header)
21 fh = changegroup.headerlessfixup(fh, header)
22 header = "HG10"
22 header = "HG10"
23 alg = 'UN'
23 alg = 'UN'
24 elif vfs:
24 elif vfs:
25 fname = vfs.join(fname)
25 fname = vfs.join(fname)
26
26
27 magic, version = header[0:2], header[2:4]
27 magic, version = header[0:2], header[2:4]
28
28
29 if magic != 'HG':
29 if magic != 'HG':
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 if version == '10':
31 if version == '10':
32 if alg is None:
32 if alg is None:
33 alg = changegroup.readexactly(fh, 2)
33 alg = changegroup.readexactly(fh, 2)
34 return changegroup.unbundle10(fh, alg)
34 return changegroup.unbundle10(fh, alg)
35 elif version == '2X':
35 elif version == '2X':
36 return bundle2.unbundle20(ui, fh, header=magic + version)
36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 else:
37 else:
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
41 class pushoperation(object):
41 class pushoperation(object):
42 """A object that represent a single push operation
42 """A object that represent a single push operation
43
43
44 It purpose is to carry push related state and very common operation.
44 It purpose is to carry push related state and very common operation.
45
45
46 A new should be created at the beginning of each push and discarded
46 A new should be created at the beginning of each push and discarded
47 afterward.
47 afterward.
48 """
48 """
49
49
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 # repo we push from
51 # repo we push from
52 self.repo = repo
52 self.repo = repo
53 self.ui = repo.ui
53 self.ui = repo.ui
54 # repo we push to
54 # repo we push to
55 self.remote = remote
55 self.remote = remote
56 # force option provided
56 # force option provided
57 self.force = force
57 self.force = force
58 # revs to be pushed (None is "all")
58 # revs to be pushed (None is "all")
59 self.revs = revs
59 self.revs = revs
60 # allow push of new branch
60 # allow push of new branch
61 self.newbranch = newbranch
61 self.newbranch = newbranch
62 # did a local lock get acquired?
62 # did a local lock get acquired?
63 self.locallocked = None
63 self.locallocked = None
64 # step already performed
64 # step already performed
65 # (used to check what steps have been already performed through bundle2)
65 # (used to check what steps have been already performed through bundle2)
66 self.stepsdone = set()
66 self.stepsdone = set()
67 # Integer version of the push result
67 # Integer version of the push result
68 # - None means nothing to push
68 # - None means nothing to push
69 # - 0 means HTTP error
69 # - 0 means HTTP error
70 # - 1 means we pushed and remote head count is unchanged *or*
70 # - 1 means we pushed and remote head count is unchanged *or*
71 # we have outgoing changesets but refused to push
71 # we have outgoing changesets but refused to push
72 # - other values as described by addchangegroup()
72 # - other values as described by addchangegroup()
73 self.ret = None
73 self.ret = None
74 # discover.outgoing object (contains common and outgoing data)
74 # discover.outgoing object (contains common and outgoing data)
75 self.outgoing = None
75 self.outgoing = None
76 # all remote heads before the push
76 # all remote heads before the push
77 self.remoteheads = None
77 self.remoteheads = None
78 # testable as a boolean indicating if any nodes are missing locally.
78 # testable as a boolean indicating if any nodes are missing locally.
79 self.incoming = None
79 self.incoming = None
80 # set of all heads common after changeset bundle push
80 # set of all heads common after changeset bundle push
81 self.commonheads = None
81 self.commonheads = None
82
82
83 def push(repo, remote, force=False, revs=None, newbranch=False):
83 def push(repo, remote, force=False, revs=None, newbranch=False):
84 '''Push outgoing changesets (limited by revs) from a local
84 '''Push outgoing changesets (limited by revs) from a local
85 repository to remote. Return an integer:
85 repository to remote. Return an integer:
86 - None means nothing to push
86 - None means nothing to push
87 - 0 means HTTP error
87 - 0 means HTTP error
88 - 1 means we pushed and remote head count is unchanged *or*
88 - 1 means we pushed and remote head count is unchanged *or*
89 we have outgoing changesets but refused to push
89 we have outgoing changesets but refused to push
90 - other values as described by addchangegroup()
90 - other values as described by addchangegroup()
91 '''
91 '''
92 pushop = pushoperation(repo, remote, force, revs, newbranch)
92 pushop = pushoperation(repo, remote, force, revs, newbranch)
93 if pushop.remote.local():
93 if pushop.remote.local():
94 missing = (set(pushop.repo.requirements)
94 missing = (set(pushop.repo.requirements)
95 - pushop.remote.local().supported)
95 - pushop.remote.local().supported)
96 if missing:
96 if missing:
97 msg = _("required features are not"
97 msg = _("required features are not"
98 " supported in the destination:"
98 " supported in the destination:"
99 " %s") % (', '.join(sorted(missing)))
99 " %s") % (', '.join(sorted(missing)))
100 raise util.Abort(msg)
100 raise util.Abort(msg)
101
101
102 # there are two ways to push to remote repo:
102 # there are two ways to push to remote repo:
103 #
103 #
104 # addchangegroup assumes local user can lock remote
104 # addchangegroup assumes local user can lock remote
105 # repo (local filesystem, old ssh servers).
105 # repo (local filesystem, old ssh servers).
106 #
106 #
107 # unbundle assumes local user cannot lock remote repo (new ssh
107 # unbundle assumes local user cannot lock remote repo (new ssh
108 # servers, http servers).
108 # servers, http servers).
109
109
110 if not pushop.remote.canpush():
110 if not pushop.remote.canpush():
111 raise util.Abort(_("destination does not support push"))
111 raise util.Abort(_("destination does not support push"))
112 # get local lock as we might write phase data
112 # get local lock as we might write phase data
113 locallock = None
113 locallock = None
114 try:
114 try:
115 locallock = pushop.repo.lock()
115 locallock = pushop.repo.lock()
116 pushop.locallocked = True
116 pushop.locallocked = True
117 except IOError, err:
117 except IOError, err:
118 pushop.locallocked = False
118 pushop.locallocked = False
119 if err.errno != errno.EACCES:
119 if err.errno != errno.EACCES:
120 raise
120 raise
121 # source repo cannot be locked.
121 # source repo cannot be locked.
122 # We do not abort the push, but just disable the local phase
122 # We do not abort the push, but just disable the local phase
123 # synchronisation.
123 # synchronisation.
124 msg = 'cannot lock source repository: %s\n' % err
124 msg = 'cannot lock source repository: %s\n' % err
125 pushop.ui.debug(msg)
125 pushop.ui.debug(msg)
126 try:
126 try:
127 pushop.repo.checkpush(pushop)
127 pushop.repo.checkpush(pushop)
128 lock = None
128 lock = None
129 unbundle = pushop.remote.capable('unbundle')
129 unbundle = pushop.remote.capable('unbundle')
130 if not unbundle:
130 if not unbundle:
131 lock = pushop.remote.lock()
131 lock = pushop.remote.lock()
132 try:
132 try:
133 _pushdiscovery(pushop)
133 _pushdiscovery(pushop)
134 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
134 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
135 False)
135 False)
136 and pushop.remote.capable('bundle2-exp')):
136 and pushop.remote.capable('bundle2-exp')):
137 _pushbundle2(pushop)
137 _pushbundle2(pushop)
138 _pushchangeset(pushop)
138 _pushchangeset(pushop)
139 _pushcomputecommonheads(pushop)
139 _pushcomputecommonheads(pushop)
140 _pushsyncphase(pushop)
140 _pushsyncphase(pushop)
141 _pushobsolete(pushop)
141 _pushobsolete(pushop)
142 finally:
142 finally:
143 if lock is not None:
143 if lock is not None:
144 lock.release()
144 lock.release()
145 finally:
145 finally:
146 if locallock is not None:
146 if locallock is not None:
147 locallock.release()
147 locallock.release()
148
148
149 _pushbookmark(pushop)
149 _pushbookmark(pushop)
150 return pushop.ret
150 return pushop.ret
151
151
152 def _pushdiscovery(pushop):
152 def _pushdiscovery(pushop):
153 # discovery
153 # discovery
154 unfi = pushop.repo.unfiltered()
154 unfi = pushop.repo.unfiltered()
155 fci = discovery.findcommonincoming
155 fci = discovery.findcommonincoming
156 commoninc = fci(unfi, pushop.remote, force=pushop.force)
156 commoninc = fci(unfi, pushop.remote, force=pushop.force)
157 common, inc, remoteheads = commoninc
157 common, inc, remoteheads = commoninc
158 fco = discovery.findcommonoutgoing
158 fco = discovery.findcommonoutgoing
159 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
159 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
160 commoninc=commoninc, force=pushop.force)
160 commoninc=commoninc, force=pushop.force)
161 pushop.outgoing = outgoing
161 pushop.outgoing = outgoing
162 pushop.remoteheads = remoteheads
162 pushop.remoteheads = remoteheads
163 pushop.incoming = inc
163 pushop.incoming = inc
164
164
165 def _pushcheckoutgoing(pushop):
165 def _pushcheckoutgoing(pushop):
166 outgoing = pushop.outgoing
166 outgoing = pushop.outgoing
167 unfi = pushop.repo.unfiltered()
167 unfi = pushop.repo.unfiltered()
168 if not outgoing.missing:
168 if not outgoing.missing:
169 # nothing to push
169 # nothing to push
170 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
170 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
171 return False
171 return False
172 # something to push
172 # something to push
173 if not pushop.force:
173 if not pushop.force:
174 # if repo.obsstore == False --> no obsolete
174 # if repo.obsstore == False --> no obsolete
175 # then, save the iteration
175 # then, save the iteration
176 if unfi.obsstore:
176 if unfi.obsstore:
177 # this message are here for 80 char limit reason
177 # this message are here for 80 char limit reason
178 mso = _("push includes obsolete changeset: %s!")
178 mso = _("push includes obsolete changeset: %s!")
179 mst = "push includes %s changeset: %s!"
179 mst = "push includes %s changeset: %s!"
180 # plain versions for i18n tool to detect them
180 # plain versions for i18n tool to detect them
181 _("push includes unstable changeset: %s!")
181 _("push includes unstable changeset: %s!")
182 _("push includes bumped changeset: %s!")
182 _("push includes bumped changeset: %s!")
183 _("push includes divergent changeset: %s!")
183 _("push includes divergent changeset: %s!")
184 # If we are to push if there is at least one
184 # If we are to push if there is at least one
185 # obsolete or unstable changeset in missing, at
185 # obsolete or unstable changeset in missing, at
186 # least one of the missinghead will be obsolete or
186 # least one of the missinghead will be obsolete or
187 # unstable. So checking heads only is ok
187 # unstable. So checking heads only is ok
188 for node in outgoing.missingheads:
188 for node in outgoing.missingheads:
189 ctx = unfi[node]
189 ctx = unfi[node]
190 if ctx.obsolete():
190 if ctx.obsolete():
191 raise util.Abort(mso % ctx)
191 raise util.Abort(mso % ctx)
192 elif ctx.troubled():
192 elif ctx.troubled():
193 raise util.Abort(_(mst)
193 raise util.Abort(_(mst)
194 % (ctx.troubles()[0],
194 % (ctx.troubles()[0],
195 ctx))
195 ctx))
196 newbm = pushop.ui.configlist('bookmarks', 'pushing')
196 newbm = pushop.ui.configlist('bookmarks', 'pushing')
197 discovery.checkheads(unfi, pushop.remote, outgoing,
197 discovery.checkheads(unfi, pushop.remote, outgoing,
198 pushop.remoteheads,
198 pushop.remoteheads,
199 pushop.newbranch,
199 pushop.newbranch,
200 bool(pushop.incoming),
200 bool(pushop.incoming),
201 newbm)
201 newbm)
202 return True
202 return True
203
203
204 def _pushb2ctx(pushop, bundler):
204 def _pushb2ctx(pushop, bundler):
205 """handle changegroup push through bundle2
205 """handle changegroup push through bundle2
206
206
207 addchangegroup result is stored in the ``pushop.ret`` attribute.
207 addchangegroup result is stored in the ``pushop.ret`` attribute.
208 """
208 """
209 if 'changesets' in pushop.stepsdone:
209 if 'changesets' in pushop.stepsdone:
210 return
210 return
211 pushop.stepsdone.add('changesets')
211 pushop.stepsdone.add('changesets')
212 # Send known heads to the server for race detection.
212 # Send known heads to the server for race detection.
213 pushop.stepsdone.add('changesets')
213 pushop.stepsdone.add('changesets')
214 if not _pushcheckoutgoing(pushop):
214 if not _pushcheckoutgoing(pushop):
215 return
215 return
216 pushop.repo.prepushoutgoinghooks(pushop.repo,
216 pushop.repo.prepushoutgoinghooks(pushop.repo,
217 pushop.remote,
217 pushop.remote,
218 pushop.outgoing)
218 pushop.outgoing)
219 if not pushop.force:
219 if not pushop.force:
220 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
220 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
221 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
221 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
222 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
222 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
223 def handlereply(op):
223 def handlereply(op):
224 """extract addchangroup returns from server reply"""
224 """extract addchangroup returns from server reply"""
225 cgreplies = op.records.getreplies(cgpart.id)
225 cgreplies = op.records.getreplies(cgpart.id)
226 assert len(cgreplies['changegroup']) == 1
226 assert len(cgreplies['changegroup']) == 1
227 pushop.ret = cgreplies['changegroup'][0]['return']
227 pushop.ret = cgreplies['changegroup'][0]['return']
228 return handlereply
228 return handlereply
229
229
230 # list of function that may decide to add parts to an outgoing bundle2
231 bundle2partsgenerators = [_pushb2ctx]
232
230 def _pushbundle2(pushop):
233 def _pushbundle2(pushop):
231 """push data to the remote using bundle2
234 """push data to the remote using bundle2
232
235
233 The only currently supported type of data is changegroup but this will
236 The only currently supported type of data is changegroup but this will
234 evolve in the future."""
237 evolve in the future."""
235 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
238 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
236 # create reply capability
239 # create reply capability
237 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
240 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
238 bundler.newpart('b2x:replycaps', data=capsblob)
241 bundler.newpart('b2x:replycaps', data=capsblob)
239 extrainfo = _pushbundle2extraparts(pushop, bundler)
242 extrainfo = _pushbundle2extraparts(pushop, bundler)
240 # add the changegroup bundle
243 replyhandlers = []
241 cgreplyhandler = _pushb2ctx(pushop, bundler)
244 for partgen in bundle2partsgenerators:
242 # do not push if no other parts than the capability
245 ret = partgen(pushop, bundler)
246 replyhandlers.append(ret)
247 # do not push if nothing to push
243 if bundler.nbparts <= 1:
248 if bundler.nbparts <= 1:
244 return
249 return
245 stream = util.chunkbuffer(bundler.getchunks())
250 stream = util.chunkbuffer(bundler.getchunks())
246 try:
251 try:
247 reply = pushop.remote.unbundle(stream, ['force'], 'push')
252 reply = pushop.remote.unbundle(stream, ['force'], 'push')
248 except error.BundleValueError, exc:
253 except error.BundleValueError, exc:
249 raise util.Abort('missing support for %s' % exc)
254 raise util.Abort('missing support for %s' % exc)
250 try:
255 try:
251 op = bundle2.processbundle(pushop.repo, reply)
256 op = bundle2.processbundle(pushop.repo, reply)
252 except error.BundleValueError, exc:
257 except error.BundleValueError, exc:
253 raise util.Abort('missing support for %s' % exc)
258 raise util.Abort('missing support for %s' % exc)
254 cgreplyhandler(op)
259 for rephand in replyhandlers:
260 rephand(op)
255 _pushbundle2extrareply(pushop, op, extrainfo)
261 _pushbundle2extrareply(pushop, op, extrainfo)
256
262
257 def _pushbundle2extraparts(pushop, bundler):
263 def _pushbundle2extraparts(pushop, bundler):
258 """hook function to let extensions add parts
264 """hook function to let extensions add parts
259
265
260 Return a dict to let extensions pass data to the reply processing.
266 Return a dict to let extensions pass data to the reply processing.
261 """
267 """
262 return {}
268 return {}
263
269
264 def _pushbundle2extrareply(pushop, op, extrainfo):
270 def _pushbundle2extrareply(pushop, op, extrainfo):
265 """hook function to let extensions react to part replies
271 """hook function to let extensions react to part replies
266
272
267 The dict from _pushbundle2extrareply is fed to this function.
273 The dict from _pushbundle2extrareply is fed to this function.
268 """
274 """
269 pass
275 pass
270
276
271 def _pushchangeset(pushop):
277 def _pushchangeset(pushop):
272 """Make the actual push of changeset bundle to remote repo"""
278 """Make the actual push of changeset bundle to remote repo"""
273 if 'changesets' in pushop.stepsdone:
279 if 'changesets' in pushop.stepsdone:
274 return
280 return
275 pushop.stepsdone.add('changesets')
281 pushop.stepsdone.add('changesets')
276 if not _pushcheckoutgoing(pushop):
282 if not _pushcheckoutgoing(pushop):
277 return
283 return
278 pushop.repo.prepushoutgoinghooks(pushop.repo,
284 pushop.repo.prepushoutgoinghooks(pushop.repo,
279 pushop.remote,
285 pushop.remote,
280 pushop.outgoing)
286 pushop.outgoing)
281 outgoing = pushop.outgoing
287 outgoing = pushop.outgoing
282 unbundle = pushop.remote.capable('unbundle')
288 unbundle = pushop.remote.capable('unbundle')
283 # TODO: get bundlecaps from remote
289 # TODO: get bundlecaps from remote
284 bundlecaps = None
290 bundlecaps = None
285 # create a changegroup from local
291 # create a changegroup from local
286 if pushop.revs is None and not (outgoing.excluded
292 if pushop.revs is None and not (outgoing.excluded
287 or pushop.repo.changelog.filteredrevs):
293 or pushop.repo.changelog.filteredrevs):
288 # push everything,
294 # push everything,
289 # use the fast path, no race possible on push
295 # use the fast path, no race possible on push
290 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
296 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
291 cg = changegroup.getsubset(pushop.repo,
297 cg = changegroup.getsubset(pushop.repo,
292 outgoing,
298 outgoing,
293 bundler,
299 bundler,
294 'push',
300 'push',
295 fastpath=True)
301 fastpath=True)
296 else:
302 else:
297 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
303 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
298 bundlecaps)
304 bundlecaps)
299
305
300 # apply changegroup to remote
306 # apply changegroup to remote
301 if unbundle:
307 if unbundle:
302 # local repo finds heads on server, finds out what
308 # local repo finds heads on server, finds out what
303 # revs it must push. once revs transferred, if server
309 # revs it must push. once revs transferred, if server
304 # finds it has different heads (someone else won
310 # finds it has different heads (someone else won
305 # commit/push race), server aborts.
311 # commit/push race), server aborts.
306 if pushop.force:
312 if pushop.force:
307 remoteheads = ['force']
313 remoteheads = ['force']
308 else:
314 else:
309 remoteheads = pushop.remoteheads
315 remoteheads = pushop.remoteheads
310 # ssh: return remote's addchangegroup()
316 # ssh: return remote's addchangegroup()
311 # http: return remote's addchangegroup() or 0 for error
317 # http: return remote's addchangegroup() or 0 for error
312 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
318 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
313 pushop.repo.url())
319 pushop.repo.url())
314 else:
320 else:
315 # we return an integer indicating remote head count
321 # we return an integer indicating remote head count
316 # change
322 # change
317 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
323 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
318
324
319 def _pushcomputecommonheads(pushop):
325 def _pushcomputecommonheads(pushop):
320 unfi = pushop.repo.unfiltered()
326 unfi = pushop.repo.unfiltered()
321 if pushop.ret:
327 if pushop.ret:
322 # push succeed, synchronize target of the push
328 # push succeed, synchronize target of the push
323 cheads = pushop.outgoing.missingheads
329 cheads = pushop.outgoing.missingheads
324 elif pushop.revs is None:
330 elif pushop.revs is None:
325 # All out push fails. synchronize all common
331 # All out push fails. synchronize all common
326 cheads = pushop.outgoing.commonheads
332 cheads = pushop.outgoing.commonheads
327 else:
333 else:
328 # I want cheads = heads(::missingheads and ::commonheads)
334 # I want cheads = heads(::missingheads and ::commonheads)
329 # (missingheads is revs with secret changeset filtered out)
335 # (missingheads is revs with secret changeset filtered out)
330 #
336 #
331 # This can be expressed as:
337 # This can be expressed as:
332 # cheads = ( (missingheads and ::commonheads)
338 # cheads = ( (missingheads and ::commonheads)
333 # + (commonheads and ::missingheads))"
339 # + (commonheads and ::missingheads))"
334 # )
340 # )
335 #
341 #
336 # while trying to push we already computed the following:
342 # while trying to push we already computed the following:
337 # common = (::commonheads)
343 # common = (::commonheads)
338 # missing = ((commonheads::missingheads) - commonheads)
344 # missing = ((commonheads::missingheads) - commonheads)
339 #
345 #
340 # We can pick:
346 # We can pick:
341 # * missingheads part of common (::commonheads)
347 # * missingheads part of common (::commonheads)
342 common = set(pushop.outgoing.common)
348 common = set(pushop.outgoing.common)
343 nm = pushop.repo.changelog.nodemap
349 nm = pushop.repo.changelog.nodemap
344 cheads = [node for node in pushop.revs if nm[node] in common]
350 cheads = [node for node in pushop.revs if nm[node] in common]
345 # and
351 # and
346 # * commonheads parents on missing
352 # * commonheads parents on missing
347 revset = unfi.set('%ln and parents(roots(%ln))',
353 revset = unfi.set('%ln and parents(roots(%ln))',
348 pushop.outgoing.commonheads,
354 pushop.outgoing.commonheads,
349 pushop.outgoing.missing)
355 pushop.outgoing.missing)
350 cheads.extend(c.node() for c in revset)
356 cheads.extend(c.node() for c in revset)
351 pushop.commonheads = cheads
357 pushop.commonheads = cheads
352
358
353 def _pushsyncphase(pushop):
359 def _pushsyncphase(pushop):
354 """synchronise phase information locally and remotely"""
360 """synchronise phase information locally and remotely"""
355 unfi = pushop.repo.unfiltered()
361 unfi = pushop.repo.unfiltered()
356 cheads = pushop.commonheads
362 cheads = pushop.commonheads
357 # even when we don't push, exchanging phase data is useful
363 # even when we don't push, exchanging phase data is useful
358 remotephases = pushop.remote.listkeys('phases')
364 remotephases = pushop.remote.listkeys('phases')
359 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
365 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
360 and remotephases # server supports phases
366 and remotephases # server supports phases
361 and pushop.ret is None # nothing was pushed
367 and pushop.ret is None # nothing was pushed
362 and remotephases.get('publishing', False)):
368 and remotephases.get('publishing', False)):
363 # When:
369 # When:
364 # - this is a subrepo push
370 # - this is a subrepo push
365 # - and remote support phase
371 # - and remote support phase
366 # - and no changeset was pushed
372 # - and no changeset was pushed
367 # - and remote is publishing
373 # - and remote is publishing
368 # We may be in issue 3871 case!
374 # We may be in issue 3871 case!
369 # We drop the possible phase synchronisation done by
375 # We drop the possible phase synchronisation done by
370 # courtesy to publish changesets possibly locally draft
376 # courtesy to publish changesets possibly locally draft
371 # on the remote.
377 # on the remote.
372 remotephases = {'publishing': 'True'}
378 remotephases = {'publishing': 'True'}
373 if not remotephases: # old server or public only reply from non-publishing
379 if not remotephases: # old server or public only reply from non-publishing
374 _localphasemove(pushop, cheads)
380 _localphasemove(pushop, cheads)
375 # don't push any phase data as there is nothing to push
381 # don't push any phase data as there is nothing to push
376 else:
382 else:
377 ana = phases.analyzeremotephases(pushop.repo, cheads,
383 ana = phases.analyzeremotephases(pushop.repo, cheads,
378 remotephases)
384 remotephases)
379 pheads, droots = ana
385 pheads, droots = ana
380 ### Apply remote phase on local
386 ### Apply remote phase on local
381 if remotephases.get('publishing', False):
387 if remotephases.get('publishing', False):
382 _localphasemove(pushop, cheads)
388 _localphasemove(pushop, cheads)
383 else: # publish = False
389 else: # publish = False
384 _localphasemove(pushop, pheads)
390 _localphasemove(pushop, pheads)
385 _localphasemove(pushop, cheads, phases.draft)
391 _localphasemove(pushop, cheads, phases.draft)
386 ### Apply local phase on remote
392 ### Apply local phase on remote
387
393
388 # Get the list of all revs draft on remote by public here.
394 # Get the list of all revs draft on remote by public here.
389 # XXX Beware that revset break if droots is not strictly
395 # XXX Beware that revset break if droots is not strictly
390 # XXX root we may want to ensure it is but it is costly
396 # XXX root we may want to ensure it is but it is costly
391 outdated = unfi.set('heads((%ln::%ln) and public())',
397 outdated = unfi.set('heads((%ln::%ln) and public())',
392 droots, cheads)
398 droots, cheads)
393
399
394 b2caps = bundle2.bundle2caps(pushop.remote)
400 b2caps = bundle2.bundle2caps(pushop.remote)
395 if 'b2x:pushkey' in b2caps:
401 if 'b2x:pushkey' in b2caps:
396 # server supports bundle2, let's do a batched push through it
402 # server supports bundle2, let's do a batched push through it
397 #
403 #
398 # This will eventually be unified with the changesets bundle2 push
404 # This will eventually be unified with the changesets bundle2 push
399 bundler = bundle2.bundle20(pushop.ui, b2caps)
405 bundler = bundle2.bundle20(pushop.ui, b2caps)
400 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
406 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
401 bundler.newpart('b2x:replycaps', data=capsblob)
407 bundler.newpart('b2x:replycaps', data=capsblob)
402 part2node = []
408 part2node = []
403 enc = pushkey.encode
409 enc = pushkey.encode
404 for newremotehead in outdated:
410 for newremotehead in outdated:
405 part = bundler.newpart('b2x:pushkey')
411 part = bundler.newpart('b2x:pushkey')
406 part.addparam('namespace', enc('phases'))
412 part.addparam('namespace', enc('phases'))
407 part.addparam('key', enc(newremotehead.hex()))
413 part.addparam('key', enc(newremotehead.hex()))
408 part.addparam('old', enc(str(phases.draft)))
414 part.addparam('old', enc(str(phases.draft)))
409 part.addparam('new', enc(str(phases.public)))
415 part.addparam('new', enc(str(phases.public)))
410 part2node.append((part.id, newremotehead))
416 part2node.append((part.id, newremotehead))
411 stream = util.chunkbuffer(bundler.getchunks())
417 stream = util.chunkbuffer(bundler.getchunks())
412 try:
418 try:
413 reply = pushop.remote.unbundle(stream, ['force'], 'push')
419 reply = pushop.remote.unbundle(stream, ['force'], 'push')
414 op = bundle2.processbundle(pushop.repo, reply)
420 op = bundle2.processbundle(pushop.repo, reply)
415 except error.BundleValueError, exc:
421 except error.BundleValueError, exc:
416 raise util.Abort('missing support for %s' % exc)
422 raise util.Abort('missing support for %s' % exc)
417 for partid, node in part2node:
423 for partid, node in part2node:
418 partrep = op.records.getreplies(partid)
424 partrep = op.records.getreplies(partid)
419 results = partrep['pushkey']
425 results = partrep['pushkey']
420 assert len(results) <= 1
426 assert len(results) <= 1
421 msg = None
427 msg = None
422 if not results:
428 if not results:
423 msg = _('server ignored update of %s to public!\n') % node
429 msg = _('server ignored update of %s to public!\n') % node
424 elif not int(results[0]['return']):
430 elif not int(results[0]['return']):
425 msg = _('updating %s to public failed!\n') % node
431 msg = _('updating %s to public failed!\n') % node
426 if msg is not None:
432 if msg is not None:
427 pushop.ui.warn(msg)
433 pushop.ui.warn(msg)
428
434
429 else:
435 else:
430 # fallback to independant pushkey command
436 # fallback to independant pushkey command
431 for newremotehead in outdated:
437 for newremotehead in outdated:
432 r = pushop.remote.pushkey('phases',
438 r = pushop.remote.pushkey('phases',
433 newremotehead.hex(),
439 newremotehead.hex(),
434 str(phases.draft),
440 str(phases.draft),
435 str(phases.public))
441 str(phases.public))
436 if not r:
442 if not r:
437 pushop.ui.warn(_('updating %s to public failed!\n')
443 pushop.ui.warn(_('updating %s to public failed!\n')
438 % newremotehead)
444 % newremotehead)
439
445
def _localphasemove(pushop, nodes, phase=phases.public):
    """Advance <nodes> to <phase> in the local source repo."""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
        return
    # The source repo is not locked, so we must not change any phase.
    # Just inform the user when some phases should have moved.
    repo = pushop.repo
    wouldmove = [n for n in nodes if phase < repo[n].phase()]
    if wouldmove:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n')
                         % phases.phasenames[phase])
453
459
def _pushobsolete(pushop):
    """Push local obsolete markers to the remote via pushkey."""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if not (obsolete._enabled and repo.obsstore):
        return
    if 'obsolete' not in remote.listkeys('namespaces'):
        return
    # NOTE(review): this reads the *local* 'obsolete' pushkey namespace;
    # each entry is then forwarded to the remote.
    localdata = repo.listkeys('obsolete')
    # reverse sort so that the iteration ends with dump0
    results = [remote.pushkey('obsolete', key, '', localdata[key])
               for key in sorted(localdata, reverse=True)]
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
470
476
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = [repo.changelog.rev(node) for node in pushop.revs or []]
    ancestors = set(repo.changelog.ancestors(revnums, inclusive=True))
    comparison = bookmarks.compare(repo, repo._bookmarks,
                                   remote.listkeys('bookmarks'),
                                   srchex=hex)
    # only bookmarks that fast-forward on the source side are pushed;
    # the other comparison categories are ignored here
    advsrc = comparison[2]
    for b, scid, dcid in advsrc:
        # skip bookmarks pointing outside of the pushed set
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)
490
496
class pulloperation(object):
    """State container for a single pull operation.

    Carries pull related state and a few very common helpers. A fresh
    instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # destination repo (the one we pull into)
        self.repo = repo
        # source repo (the one we pull from)
        self.remote = remote
        # revisions requested from the remote (None means "everything")
        self.heads = heads
        # whether the pull is forced
        self.force = force
        # name used when the pull transaction is opened
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # lazily created transaction (see gettransaction)
        self._tr = None
        # changesets known to both sides, filled in by discovery
        self.common = None
        # heads reported by the remote, filled in by discovery
        self.rheads = None
        # changesets missing locally, filled in by discovery
        self.fetch = None
        # changegroup application result (used as pull's return code)
        self.cgresult = None
        # steps still to be performed (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is not None:
            # a specific subset was pulled: sync on that subset only
            return self.heads
        # everything possible was pulled: sync on all common changesets
        # plus any remote head not already known to be common
        known = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in known)
        return subset

    def gettransaction(self):
        """return the pull transaction, creating it on first use"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """commit the transaction if one was created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release the transaction if one was created"""
        if self._tr is not None:
            self._tr.release()
557
563
def pull(repo, remote, heads=None, force=False):
    """pull changesets and related data from ``remote`` into ``repo``

    ``heads`` restricts the pull to the given revisions (None means
    everything); ``force`` is forwarded to discovery. Returns the
    changegroup application result (used as the command return code).
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # local-to-local pull: refuse sources whose repository
        # requirements we do not support
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        # bundle2 is used only when experimentally enabled locally and
        # advertised by the remote
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        # each helper removes its entry from todosteps; whatever the
        # bundle2 exchange did not cover falls back to the legacy paths
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        # commit the transaction (if any step created one)
        pullop.closetransaction()
    finally:
        # release rolls back anything left uncommitted, then drop the lock
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
586
592
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will eventually
    handle all discovery."""
    unfi = pullop.repo.unfiltered()
    (pullop.common,
     pullop.fetch,
     pullop.rheads) = discovery.findcommonincoming(unfi, pullop.remote,
                                                   heads=pullop.heads,
                                                   force=pullop.force)
597
603
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    # arguments for the getbundle wire call
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # pulling changegroup
    pullop.todosteps.remove('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    # request remote phase data in the same bundle when supported
    if 'b2x:listkeys' in remotecaps:
        kwargs['listkeys'] = ['phase']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    # let extensions add whatever they need to the request
    _pullbundle2extraprepare(pullop, kwargs)
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError, exc:
        # the bundle contained a mandatory part we cannot handle
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        assert len(op.records['changegroup']) == 1
        pullop.cgresult = op.records['changegroup'][0]['return']

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)
634
640
635 def _pullbundle2extraprepare(pullop, kwargs):
641 def _pullbundle2extraprepare(pullop, kwargs):
636 """hook function so that extensions can extend the getbundle call"""
642 """hook function so that extensions can extend the getbundle call"""
637 pass
643 pass
638
644
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the richest protocol the remote supports
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # old server without getbundle: full pull only
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
670
676
def _pullphase(pullop):
    """Fetch the remote 'phases' pushkey namespace and apply it locally."""
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
675
681
def _pullapplyphases(pullop, remotephases):
    """Apply phase movements deduced from the observed remote state."""
    pullop.todosteps.remove('phases')
    publishing = bool(remotephases.get('publishing', False))
    repo = pullop.repo
    subset = pullop.pulledsubset
    if publishing or not remotephases:
        # Remote is old or publishing: all common changesets should be
        # seen as public.
        phases.advanceboundary(repo, phases.public, subset)
        return
    # Remote is new and non-publishing: honour its phase information.
    pheads, _droots = phases.analyzeremotephases(repo, subset,
                                                 remotephases)
    phases.advanceboundary(repo, phases.public, pheads)
    phases.advanceboundary(repo, phases.draft, subset)
693
699
def _pullobsolete(pullop):
    """Pull obsolete markers from the remote via the 'obsolete' pushkey.

    Returns the pull transaction when one had to be created (i.e. when
    markers were actually received) so callers know a new transaction
    exists, and None otherwise.

    Exists mostly to allow overriding for experimentation purpose"""
    pullop.todosteps.remove('obsmarkers')
    if not obsolete._enabled:
        return None
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    if 'dump0' not in remoteobs:
        return None
    tr = pullop.gettransaction()
    for key in sorted(remoteobs, reverse=True):
        if not key.startswith('dump'):
            continue
        markers = base85.b85decode(remoteobs[key])
        pullop.repo.obsstore.mergemarkers(tr, markers)
    pullop.repo.invalidatevolatilesets()
    return tr
715
721
def caps20to10(repo):
    """Return the bundlecaps set advertising bundle2 support for getbundle."""
    blob = bundle2.encodecaps(repo.bundle2caps)
    return set(['HG2X', 'bundle2=' + urllib.quote(blob)])
722
728
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types become available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when
    we have a clearer idea of the API we want to use to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build changegroup bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # plain bundle10 requested: extra arguments cannot be honored
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    # decode the bundle2 capabilities advertised through bundlecaps
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
    # one listkeys part per requested pushkey namespace
    listkeys = kwargs.get('listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
    # let extensions append their own parts
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
765
771
766 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
772 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
767 bundlecaps=None, **kwargs):
773 bundlecaps=None, **kwargs):
768 """hook function to let extensions add parts to the requested bundle"""
774 """hook function to let extensions add parts to the requested bundle"""
769 pass
775 pass
770
776
def check_heads(repo, their_heads, context):
    """Raise PushRaced when the repo heads moved since *their_heads* was taken.

    Used by peer for unbundling.
    """
    if their_heads == ['force']:
        return
    current = repo.heads()
    if their_heads == current:
        return
    digest = util.sha1(''.join(sorted(current))).digest()
    if their_heads == ['hashed', digest]:
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
784
790
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and
    have mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # bundle2 stream (only bundle2 unbundlers carry 'params')
            try:
                tr = repo.transaction('unbundle')
                # let hooks know this transaction comes from bundle2
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                # expose pending changes so the pre-close hook can see them
                cl = repo.unfiltered().changelog
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # tag the failure so the wire protocol layer can report it
                # as a bundle2 processing error
                exc.duringunbundle2 = True
                raise
        else:
            # legacy bundle10 changegroup
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        # release is a no-op after a successful tr.close()
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now