bundle2-push: drop _pushbundle2extraparts...
Pierre-Yves David
r21906:08dcb572 default
@@ -1,827 +1,811 @@
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
14 def readbundle(ui, fh, fname, vfs=None):
14 def readbundle(ui, fh, fname, vfs=None):
15 header = changegroup.readexactly(fh, 4)
15 header = changegroup.readexactly(fh, 4)
16
16
17 alg = None
17 alg = None
18 if not fname:
18 if not fname:
19 fname = "stream"
19 fname = "stream"
20 if not header.startswith('HG') and header.startswith('\0'):
20 if not header.startswith('HG') and header.startswith('\0'):
21 fh = changegroup.headerlessfixup(fh, header)
21 fh = changegroup.headerlessfixup(fh, header)
22 header = "HG10"
22 header = "HG10"
23 alg = 'UN'
23 alg = 'UN'
24 elif vfs:
24 elif vfs:
25 fname = vfs.join(fname)
25 fname = vfs.join(fname)
26
26
27 magic, version = header[0:2], header[2:4]
27 magic, version = header[0:2], header[2:4]
28
28
29 if magic != 'HG':
29 if magic != 'HG':
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 if version == '10':
31 if version == '10':
32 if alg is None:
32 if alg is None:
33 alg = changegroup.readexactly(fh, 2)
33 alg = changegroup.readexactly(fh, 2)
34 return changegroup.unbundle10(fh, alg)
34 return changegroup.unbundle10(fh, alg)
35 elif version == '2X':
35 elif version == '2X':
36 return bundle2.unbundle20(ui, fh, header=magic + version)
36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 else:
37 else:
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
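The four-byte header dispatch above is the only format detection Mercurial performs on a bundle stream: a plain HG10 changegroup carries a two-byte compression code ('UN', 'BZ', 'GZ') right after the magic, while the experimental HG2X format is handed whole to bundle2.unbundle20. A minimal usage sketch, assuming a Python 2 environment with Mercurial importable and a bundle file previously written by `hg bundle` (the path is hypothetical):

from mercurial import ui as uimod, exchange

myui = uimod.ui()                      # plain ui, no config loaded
fname = '/tmp/example.hg'              # hypothetical bundle produced by `hg bundle`
fh = open(fname, 'rb')
try:
    gen = exchange.readbundle(myui, fh, fname)
    # gen is a changegroup.unbundle10 or a bundle2.unbundle20 instance,
    # depending on the magic and version parsed above.
    myui.write('%s\n' % gen.__class__.__name__)
finally:
    fh.close()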
41 class pushoperation(object):
41 class pushoperation(object):
42 """A object that represent a single push operation
42 """A object that represent a single push operation
43
43
44 It purpose is to carry push related state and very common operation.
44 It purpose is to carry push related state and very common operation.
45
45
46 A new should be created at the beginning of each push and discarded
46 A new should be created at the beginning of each push and discarded
47 afterward.
47 afterward.
48 """
48 """
49
49
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 # repo we push from
51 # repo we push from
52 self.repo = repo
52 self.repo = repo
53 self.ui = repo.ui
53 self.ui = repo.ui
54 # repo we push to
54 # repo we push to
55 self.remote = remote
55 self.remote = remote
56 # force option provided
56 # force option provided
57 self.force = force
57 self.force = force
58 # revs to be pushed (None is "all")
58 # revs to be pushed (None is "all")
59 self.revs = revs
59 self.revs = revs
60 # allow push of new branch
60 # allow push of new branch
61 self.newbranch = newbranch
61 self.newbranch = newbranch
62 # did a local lock get acquired?
62 # did a local lock get acquired?
63 self.locallocked = None
63 self.locallocked = None
64 # step already performed
64 # step already performed
65 # (used to check what steps have been already performed through bundle2)
65 # (used to check what steps have been already performed through bundle2)
66 self.stepsdone = set()
66 self.stepsdone = set()
67 # Integer version of the push result
67 # Integer version of the push result
68 # - None means nothing to push
68 # - None means nothing to push
69 # - 0 means HTTP error
69 # - 0 means HTTP error
70 # - 1 means we pushed and remote head count is unchanged *or*
70 # - 1 means we pushed and remote head count is unchanged *or*
71 # we have outgoing changesets but refused to push
71 # we have outgoing changesets but refused to push
72 # - other values as described by addchangegroup()
72 # - other values as described by addchangegroup()
73 self.ret = None
73 self.ret = None
74 # discover.outgoing object (contains common and outgoing data)
74 # discover.outgoing object (contains common and outgoing data)
75 self.outgoing = None
75 self.outgoing = None
76 # all remote heads before the push
76 # all remote heads before the push
77 self.remoteheads = None
77 self.remoteheads = None
78 # testable as a boolean indicating if any nodes are missing locally.
78 # testable as a boolean indicating if any nodes are missing locally.
79 self.incoming = None
79 self.incoming = None
80 # set of all heads common after changeset bundle push
80 # set of all heads common after changeset bundle push
81 self.commonheads = None
81 self.commonheads = None
82
82
83 def push(repo, remote, force=False, revs=None, newbranch=False):
83 def push(repo, remote, force=False, revs=None, newbranch=False):
84 '''Push outgoing changesets (limited by revs) from a local
84 '''Push outgoing changesets (limited by revs) from a local
85 repository to remote. Return an integer:
85 repository to remote. Return an integer:
86 - None means nothing to push
86 - None means nothing to push
87 - 0 means HTTP error
87 - 0 means HTTP error
88 - 1 means we pushed and remote head count is unchanged *or*
88 - 1 means we pushed and remote head count is unchanged *or*
89 we have outgoing changesets but refused to push
89 we have outgoing changesets but refused to push
90 - other values as described by addchangegroup()
90 - other values as described by addchangegroup()
91 '''
91 '''
92 pushop = pushoperation(repo, remote, force, revs, newbranch)
92 pushop = pushoperation(repo, remote, force, revs, newbranch)
93 if pushop.remote.local():
93 if pushop.remote.local():
94 missing = (set(pushop.repo.requirements)
94 missing = (set(pushop.repo.requirements)
95 - pushop.remote.local().supported)
95 - pushop.remote.local().supported)
96 if missing:
96 if missing:
97 msg = _("required features are not"
97 msg = _("required features are not"
98 " supported in the destination:"
98 " supported in the destination:"
99 " %s") % (', '.join(sorted(missing)))
99 " %s") % (', '.join(sorted(missing)))
100 raise util.Abort(msg)
100 raise util.Abort(msg)
101
101
102 # there are two ways to push to remote repo:
102 # there are two ways to push to remote repo:
103 #
103 #
104 # addchangegroup assumes local user can lock remote
104 # addchangegroup assumes local user can lock remote
105 # repo (local filesystem, old ssh servers).
105 # repo (local filesystem, old ssh servers).
106 #
106 #
107 # unbundle assumes local user cannot lock remote repo (new ssh
107 # unbundle assumes local user cannot lock remote repo (new ssh
108 # servers, http servers).
108 # servers, http servers).
109
109
110 if not pushop.remote.canpush():
110 if not pushop.remote.canpush():
111 raise util.Abort(_("destination does not support push"))
111 raise util.Abort(_("destination does not support push"))
112 # get local lock as we might write phase data
112 # get local lock as we might write phase data
113 locallock = None
113 locallock = None
114 try:
114 try:
115 locallock = pushop.repo.lock()
115 locallock = pushop.repo.lock()
116 pushop.locallocked = True
116 pushop.locallocked = True
117 except IOError, err:
117 except IOError, err:
118 pushop.locallocked = False
118 pushop.locallocked = False
119 if err.errno != errno.EACCES:
119 if err.errno != errno.EACCES:
120 raise
120 raise
121 # source repo cannot be locked.
121 # source repo cannot be locked.
122 # We do not abort the push, but just disable the local phase
122 # We do not abort the push, but just disable the local phase
123 # synchronisation.
123 # synchronisation.
124 msg = 'cannot lock source repository: %s\n' % err
124 msg = 'cannot lock source repository: %s\n' % err
125 pushop.ui.debug(msg)
125 pushop.ui.debug(msg)
126 try:
126 try:
127 pushop.repo.checkpush(pushop)
127 pushop.repo.checkpush(pushop)
128 lock = None
128 lock = None
129 unbundle = pushop.remote.capable('unbundle')
129 unbundle = pushop.remote.capable('unbundle')
130 if not unbundle:
130 if not unbundle:
131 lock = pushop.remote.lock()
131 lock = pushop.remote.lock()
132 try:
132 try:
133 _pushdiscovery(pushop)
133 _pushdiscovery(pushop)
134 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
134 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
135 False)
135 False)
136 and pushop.remote.capable('bundle2-exp')):
136 and pushop.remote.capable('bundle2-exp')):
137 _pushbundle2(pushop)
137 _pushbundle2(pushop)
138 _pushchangeset(pushop)
138 _pushchangeset(pushop)
139 _pushcomputecommonheads(pushop)
139 _pushcomputecommonheads(pushop)
140 _pushsyncphase(pushop)
140 _pushsyncphase(pushop)
141 _pushobsolete(pushop)
141 _pushobsolete(pushop)
142 finally:
142 finally:
143 if lock is not None:
143 if lock is not None:
144 lock.release()
144 lock.release()
145 finally:
145 finally:
146 if locallock is not None:
146 if locallock is not None:
147 locallock.release()
147 locallock.release()
148
148
149 _pushbookmark(pushop)
149 _pushbookmark(pushop)
150 return pushop.ret
150 return pushop.ret
151
151
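The integer contract documented in the docstring above is what callers (for example the push command) branch on. A hedged sketch of driving exchange.push() directly between two local repositories; the paths and the ui setup are illustrative only:

from mercurial import hg, ui as uimod, exchange

myui = uimod.ui()
src = hg.repository(myui, '/tmp/src')        # hypothetical source repo
dest = hg.peer(myui, {}, '/tmp/dest')        # hypothetical destination peer
ret = exchange.push(src, dest, newbranch=True)
if ret is None:
    myui.status('nothing to push\n')
elif ret == 0:
    myui.warn('push failed (HTTP error)\n')
else:
    # 1 means the remote head count is unchanged (or the push was refused);
    # other values are whatever addchangegroup() returned on the remote side.
    myui.status('push returned %d\n' % ret)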
152 def _pushdiscovery(pushop):
152 def _pushdiscovery(pushop):
153 # discovery
153 # discovery
154 unfi = pushop.repo.unfiltered()
154 unfi = pushop.repo.unfiltered()
155 fci = discovery.findcommonincoming
155 fci = discovery.findcommonincoming
156 commoninc = fci(unfi, pushop.remote, force=pushop.force)
156 commoninc = fci(unfi, pushop.remote, force=pushop.force)
157 common, inc, remoteheads = commoninc
157 common, inc, remoteheads = commoninc
158 fco = discovery.findcommonoutgoing
158 fco = discovery.findcommonoutgoing
159 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
159 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
160 commoninc=commoninc, force=pushop.force)
160 commoninc=commoninc, force=pushop.force)
161 pushop.outgoing = outgoing
161 pushop.outgoing = outgoing
162 pushop.remoteheads = remoteheads
162 pushop.remoteheads = remoteheads
163 pushop.incoming = inc
163 pushop.incoming = inc
164
164
165 def _pushcheckoutgoing(pushop):
165 def _pushcheckoutgoing(pushop):
166 outgoing = pushop.outgoing
166 outgoing = pushop.outgoing
167 unfi = pushop.repo.unfiltered()
167 unfi = pushop.repo.unfiltered()
168 if not outgoing.missing:
168 if not outgoing.missing:
169 # nothing to push
169 # nothing to push
170 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
170 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
171 return False
171 return False
172 # something to push
172 # something to push
173 if not pushop.force:
173 if not pushop.force:
174 # if repo.obsstore == False --> no obsolete
174 # if repo.obsstore == False --> no obsolete
175 # then, save the iteration
175 # then, save the iteration
176 if unfi.obsstore:
176 if unfi.obsstore:
177 # this message are here for 80 char limit reason
177 # this message are here for 80 char limit reason
178 mso = _("push includes obsolete changeset: %s!")
178 mso = _("push includes obsolete changeset: %s!")
179 mst = "push includes %s changeset: %s!"
179 mst = "push includes %s changeset: %s!"
180 # plain versions for i18n tool to detect them
180 # plain versions for i18n tool to detect them
181 _("push includes unstable changeset: %s!")
181 _("push includes unstable changeset: %s!")
182 _("push includes bumped changeset: %s!")
182 _("push includes bumped changeset: %s!")
183 _("push includes divergent changeset: %s!")
183 _("push includes divergent changeset: %s!")
184 # If we are to push if there is at least one
184 # If we are to push if there is at least one
185 # obsolete or unstable changeset in missing, at
185 # obsolete or unstable changeset in missing, at
186 # least one of the missinghead will be obsolete or
186 # least one of the missinghead will be obsolete or
187 # unstable. So checking heads only is ok
187 # unstable. So checking heads only is ok
188 for node in outgoing.missingheads:
188 for node in outgoing.missingheads:
189 ctx = unfi[node]
189 ctx = unfi[node]
190 if ctx.obsolete():
190 if ctx.obsolete():
191 raise util.Abort(mso % ctx)
191 raise util.Abort(mso % ctx)
192 elif ctx.troubled():
192 elif ctx.troubled():
193 raise util.Abort(_(mst)
193 raise util.Abort(_(mst)
194 % (ctx.troubles()[0],
194 % (ctx.troubles()[0],
195 ctx))
195 ctx))
196 newbm = pushop.ui.configlist('bookmarks', 'pushing')
196 newbm = pushop.ui.configlist('bookmarks', 'pushing')
197 discovery.checkheads(unfi, pushop.remote, outgoing,
197 discovery.checkheads(unfi, pushop.remote, outgoing,
198 pushop.remoteheads,
198 pushop.remoteheads,
199 pushop.newbranch,
199 pushop.newbranch,
200 bool(pushop.incoming),
200 bool(pushop.incoming),
201 newbm)
201 newbm)
202 return True
202 return True
203
203
204 def _pushb2ctx(pushop, bundler):
204 def _pushb2ctx(pushop, bundler):
205 """handle changegroup push through bundle2
205 """handle changegroup push through bundle2
206
206
207 addchangegroup result is stored in the ``pushop.ret`` attribute.
207 addchangegroup result is stored in the ``pushop.ret`` attribute.
208 """
208 """
209 if 'changesets' in pushop.stepsdone:
209 if 'changesets' in pushop.stepsdone:
210 return
210 return
211 pushop.stepsdone.add('changesets')
211 pushop.stepsdone.add('changesets')
212 # Send known heads to the server for race detection.
212 # Send known heads to the server for race detection.
213 pushop.stepsdone.add('changesets')
213 pushop.stepsdone.add('changesets')
214 if not _pushcheckoutgoing(pushop):
214 if not _pushcheckoutgoing(pushop):
215 return
215 return
216 pushop.repo.prepushoutgoinghooks(pushop.repo,
216 pushop.repo.prepushoutgoinghooks(pushop.repo,
217 pushop.remote,
217 pushop.remote,
218 pushop.outgoing)
218 pushop.outgoing)
219 if not pushop.force:
219 if not pushop.force:
220 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
220 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
221 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
221 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
222 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
222 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
223 def handlereply(op):
223 def handlereply(op):
224 """extract addchangroup returns from server reply"""
224 """extract addchangroup returns from server reply"""
225 cgreplies = op.records.getreplies(cgpart.id)
225 cgreplies = op.records.getreplies(cgpart.id)
226 assert len(cgreplies['changegroup']) == 1
226 assert len(cgreplies['changegroup']) == 1
227 pushop.ret = cgreplies['changegroup'][0]['return']
227 pushop.ret = cgreplies['changegroup'][0]['return']
228 return handlereply
228 return handlereply
229
229
230 # list of function that may decide to add parts to an outgoing bundle2
230 # list of function that may decide to add parts to an outgoing bundle2
231 bundle2partsgenerators = [_pushb2ctx]
231 bundle2partsgenerators = [_pushb2ctx]
232
232
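This module-level list is the extension point that replaces the _pushbundle2extraparts / _pushbundle2extrareply pair removed in this changeset: each generator may add parts to the outgoing bundle and returns a callable that is later invoked with the processed server reply. A minimal sketch of a hypothetical third-party extension registering its own generator (the part name and payload are invented for illustration):

from mercurial import exchange

def _pushmypart(pushop, bundler):
    """add a hypothetical b2x:myextension part to every push"""
    part = bundler.newpart('b2x:myextension', data='hello')
    def handlereply(op):
        # op is the bundle2 operation built from the server reply
        pushop.ui.debug('b2x:myextension part %i was processed\n' % part.id)
    return handlereply

def extsetup(ui):
    exchange.bundle2partsgenerators.append(_pushmypart)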
233 def _pushbundle2(pushop):
233 def _pushbundle2(pushop):
234 """push data to the remote using bundle2
234 """push data to the remote using bundle2
235
235
236 The only currently supported type of data is changegroup but this will
236 The only currently supported type of data is changegroup but this will
237 evolve in the future."""
237 evolve in the future."""
238 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
238 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
239 # create reply capability
239 # create reply capability
240 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
240 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
241 bundler.newpart('b2x:replycaps', data=capsblob)
241 bundler.newpart('b2x:replycaps', data=capsblob)
242 extrainfo = _pushbundle2extraparts(pushop, bundler)
243 replyhandlers = []
242 replyhandlers = []
244 for partgen in bundle2partsgenerators:
243 for partgen in bundle2partsgenerators:
245 ret = partgen(pushop, bundler)
244 ret = partgen(pushop, bundler)
246 replyhandlers.append(ret)
245 replyhandlers.append(ret)
247 # do not push if nothing to push
246 # do not push if nothing to push
248 if bundler.nbparts <= 1:
247 if bundler.nbparts <= 1:
249 return
248 return
250 stream = util.chunkbuffer(bundler.getchunks())
249 stream = util.chunkbuffer(bundler.getchunks())
251 try:
250 try:
252 reply = pushop.remote.unbundle(stream, ['force'], 'push')
251 reply = pushop.remote.unbundle(stream, ['force'], 'push')
253 except error.BundleValueError, exc:
252 except error.BundleValueError, exc:
254 raise util.Abort('missing support for %s' % exc)
253 raise util.Abort('missing support for %s' % exc)
255 try:
254 try:
256 op = bundle2.processbundle(pushop.repo, reply)
255 op = bundle2.processbundle(pushop.repo, reply)
257 except error.BundleValueError, exc:
256 except error.BundleValueError, exc:
258 raise util.Abort('missing support for %s' % exc)
257 raise util.Abort('missing support for %s' % exc)
259 for rephand in replyhandlers:
258 for rephand in replyhandlers:
260 rephand(op)
259 rephand(op)
261 _pushbundle2extrareply(pushop, op, extrainfo)
262
263 def _pushbundle2extraparts(pushop, bundler):
264 """hook function to let extensions add parts
265
266 Return a dict to let extensions pass data to the reply processing.
267 """
268 return {}
269
270 def _pushbundle2extrareply(pushop, op, extrainfo):
271 """hook function to let extensions react to part replies
272
273 The dict from _pushbundle2extrareply is fed to this function.
274 """
275 pass
276
260
277 def _pushchangeset(pushop):
261 def _pushchangeset(pushop):
278 """Make the actual push of changeset bundle to remote repo"""
262 """Make the actual push of changeset bundle to remote repo"""
279 if 'changesets' in pushop.stepsdone:
263 if 'changesets' in pushop.stepsdone:
280 return
264 return
281 pushop.stepsdone.add('changesets')
265 pushop.stepsdone.add('changesets')
282 if not _pushcheckoutgoing(pushop):
266 if not _pushcheckoutgoing(pushop):
283 return
267 return
284 pushop.repo.prepushoutgoinghooks(pushop.repo,
268 pushop.repo.prepushoutgoinghooks(pushop.repo,
285 pushop.remote,
269 pushop.remote,
286 pushop.outgoing)
270 pushop.outgoing)
287 outgoing = pushop.outgoing
271 outgoing = pushop.outgoing
288 unbundle = pushop.remote.capable('unbundle')
272 unbundle = pushop.remote.capable('unbundle')
289 # TODO: get bundlecaps from remote
273 # TODO: get bundlecaps from remote
290 bundlecaps = None
274 bundlecaps = None
291 # create a changegroup from local
275 # create a changegroup from local
292 if pushop.revs is None and not (outgoing.excluded
276 if pushop.revs is None and not (outgoing.excluded
293 or pushop.repo.changelog.filteredrevs):
277 or pushop.repo.changelog.filteredrevs):
294 # push everything,
278 # push everything,
295 # use the fast path, no race possible on push
279 # use the fast path, no race possible on push
296 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
280 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
297 cg = changegroup.getsubset(pushop.repo,
281 cg = changegroup.getsubset(pushop.repo,
298 outgoing,
282 outgoing,
299 bundler,
283 bundler,
300 'push',
284 'push',
301 fastpath=True)
285 fastpath=True)
302 else:
286 else:
303 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
287 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
304 bundlecaps)
288 bundlecaps)
305
289
306 # apply changegroup to remote
290 # apply changegroup to remote
307 if unbundle:
291 if unbundle:
308 # local repo finds heads on server, finds out what
292 # local repo finds heads on server, finds out what
309 # revs it must push. once revs transferred, if server
293 # revs it must push. once revs transferred, if server
310 # finds it has different heads (someone else won
294 # finds it has different heads (someone else won
311 # commit/push race), server aborts.
295 # commit/push race), server aborts.
312 if pushop.force:
296 if pushop.force:
313 remoteheads = ['force']
297 remoteheads = ['force']
314 else:
298 else:
315 remoteheads = pushop.remoteheads
299 remoteheads = pushop.remoteheads
316 # ssh: return remote's addchangegroup()
300 # ssh: return remote's addchangegroup()
317 # http: return remote's addchangegroup() or 0 for error
301 # http: return remote's addchangegroup() or 0 for error
318 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
302 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
319 pushop.repo.url())
303 pushop.repo.url())
320 else:
304 else:
321 # we return an integer indicating remote head count
305 # we return an integer indicating remote head count
322 # change
306 # change
323 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
307 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
324
308
325 def _pushcomputecommonheads(pushop):
309 def _pushcomputecommonheads(pushop):
326 unfi = pushop.repo.unfiltered()
310 unfi = pushop.repo.unfiltered()
327 if pushop.ret:
311 if pushop.ret:
328 # push succeed, synchronize target of the push
312 # push succeed, synchronize target of the push
329 cheads = pushop.outgoing.missingheads
313 cheads = pushop.outgoing.missingheads
330 elif pushop.revs is None:
314 elif pushop.revs is None:
331 # All out push fails. synchronize all common
315 # All out push fails. synchronize all common
332 cheads = pushop.outgoing.commonheads
316 cheads = pushop.outgoing.commonheads
333 else:
317 else:
334 # I want cheads = heads(::missingheads and ::commonheads)
318 # I want cheads = heads(::missingheads and ::commonheads)
335 # (missingheads is revs with secret changeset filtered out)
319 # (missingheads is revs with secret changeset filtered out)
336 #
320 #
337 # This can be expressed as:
321 # This can be expressed as:
338 # cheads = ( (missingheads and ::commonheads)
322 # cheads = ( (missingheads and ::commonheads)
339 # + (commonheads and ::missingheads))"
323 # + (commonheads and ::missingheads))"
340 # )
324 # )
341 #
325 #
342 # while trying to push we already computed the following:
326 # while trying to push we already computed the following:
343 # common = (::commonheads)
327 # common = (::commonheads)
344 # missing = ((commonheads::missingheads) - commonheads)
328 # missing = ((commonheads::missingheads) - commonheads)
345 #
329 #
346 # We can pick:
330 # We can pick:
347 # * missingheads part of common (::commonheads)
331 # * missingheads part of common (::commonheads)
348 common = set(pushop.outgoing.common)
332 common = set(pushop.outgoing.common)
349 nm = pushop.repo.changelog.nodemap
333 nm = pushop.repo.changelog.nodemap
350 cheads = [node for node in pushop.revs if nm[node] in common]
334 cheads = [node for node in pushop.revs if nm[node] in common]
351 # and
335 # and
352 # * commonheads parents on missing
336 # * commonheads parents on missing
353 revset = unfi.set('%ln and parents(roots(%ln))',
337 revset = unfi.set('%ln and parents(roots(%ln))',
354 pushop.outgoing.commonheads,
338 pushop.outgoing.commonheads,
355 pushop.outgoing.missing)
339 pushop.outgoing.missing)
356 cheads.extend(c.node() for c in revset)
340 cheads.extend(c.node() for c in revset)
357 pushop.commonheads = cheads
341 pushop.commonheads = cheads
358
342
359 def _pushsyncphase(pushop):
343 def _pushsyncphase(pushop):
360 """synchronise phase information locally and remotely"""
344 """synchronise phase information locally and remotely"""
361 unfi = pushop.repo.unfiltered()
345 unfi = pushop.repo.unfiltered()
362 cheads = pushop.commonheads
346 cheads = pushop.commonheads
363 # even when we don't push, exchanging phase data is useful
347 # even when we don't push, exchanging phase data is useful
364 remotephases = pushop.remote.listkeys('phases')
348 remotephases = pushop.remote.listkeys('phases')
365 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
349 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
366 and remotephases # server supports phases
350 and remotephases # server supports phases
367 and pushop.ret is None # nothing was pushed
351 and pushop.ret is None # nothing was pushed
368 and remotephases.get('publishing', False)):
352 and remotephases.get('publishing', False)):
369 # When:
353 # When:
370 # - this is a subrepo push
354 # - this is a subrepo push
371 # - and remote support phase
355 # - and remote support phase
372 # - and no changeset was pushed
356 # - and no changeset was pushed
373 # - and remote is publishing
357 # - and remote is publishing
374 # We may be in issue 3871 case!
358 # We may be in issue 3871 case!
375 # We drop the possible phase synchronisation done by
359 # We drop the possible phase synchronisation done by
376 # courtesy to publish changesets possibly locally draft
360 # courtesy to publish changesets possibly locally draft
377 # on the remote.
361 # on the remote.
378 remotephases = {'publishing': 'True'}
362 remotephases = {'publishing': 'True'}
379 if not remotephases: # old server or public only reply from non-publishing
363 if not remotephases: # old server or public only reply from non-publishing
380 _localphasemove(pushop, cheads)
364 _localphasemove(pushop, cheads)
381 # don't push any phase data as there is nothing to push
365 # don't push any phase data as there is nothing to push
382 else:
366 else:
383 ana = phases.analyzeremotephases(pushop.repo, cheads,
367 ana = phases.analyzeremotephases(pushop.repo, cheads,
384 remotephases)
368 remotephases)
385 pheads, droots = ana
369 pheads, droots = ana
386 ### Apply remote phase on local
370 ### Apply remote phase on local
387 if remotephases.get('publishing', False):
371 if remotephases.get('publishing', False):
388 _localphasemove(pushop, cheads)
372 _localphasemove(pushop, cheads)
389 else: # publish = False
373 else: # publish = False
390 _localphasemove(pushop, pheads)
374 _localphasemove(pushop, pheads)
391 _localphasemove(pushop, cheads, phases.draft)
375 _localphasemove(pushop, cheads, phases.draft)
392 ### Apply local phase on remote
376 ### Apply local phase on remote
393
377
394 # Get the list of all revs draft on remote by public here.
378 # Get the list of all revs draft on remote by public here.
395 # XXX Beware that revset break if droots is not strictly
379 # XXX Beware that revset break if droots is not strictly
396 # XXX root we may want to ensure it is but it is costly
380 # XXX root we may want to ensure it is but it is costly
397 outdated = unfi.set('heads((%ln::%ln) and public())',
381 outdated = unfi.set('heads((%ln::%ln) and public())',
398 droots, cheads)
382 droots, cheads)
399
383
400 b2caps = bundle2.bundle2caps(pushop.remote)
384 b2caps = bundle2.bundle2caps(pushop.remote)
401 if 'b2x:pushkey' in b2caps:
385 if 'b2x:pushkey' in b2caps:
402 # server supports bundle2, let's do a batched push through it
386 # server supports bundle2, let's do a batched push through it
403 #
387 #
404 # This will eventually be unified with the changesets bundle2 push
388 # This will eventually be unified with the changesets bundle2 push
405 bundler = bundle2.bundle20(pushop.ui, b2caps)
389 bundler = bundle2.bundle20(pushop.ui, b2caps)
406 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
390 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
407 bundler.newpart('b2x:replycaps', data=capsblob)
391 bundler.newpart('b2x:replycaps', data=capsblob)
408 part2node = []
392 part2node = []
409 enc = pushkey.encode
393 enc = pushkey.encode
410 for newremotehead in outdated:
394 for newremotehead in outdated:
411 part = bundler.newpart('b2x:pushkey')
395 part = bundler.newpart('b2x:pushkey')
412 part.addparam('namespace', enc('phases'))
396 part.addparam('namespace', enc('phases'))
413 part.addparam('key', enc(newremotehead.hex()))
397 part.addparam('key', enc(newremotehead.hex()))
414 part.addparam('old', enc(str(phases.draft)))
398 part.addparam('old', enc(str(phases.draft)))
415 part.addparam('new', enc(str(phases.public)))
399 part.addparam('new', enc(str(phases.public)))
416 part2node.append((part.id, newremotehead))
400 part2node.append((part.id, newremotehead))
417 stream = util.chunkbuffer(bundler.getchunks())
401 stream = util.chunkbuffer(bundler.getchunks())
418 try:
402 try:
419 reply = pushop.remote.unbundle(stream, ['force'], 'push')
403 reply = pushop.remote.unbundle(stream, ['force'], 'push')
420 op = bundle2.processbundle(pushop.repo, reply)
404 op = bundle2.processbundle(pushop.repo, reply)
421 except error.BundleValueError, exc:
405 except error.BundleValueError, exc:
422 raise util.Abort('missing support for %s' % exc)
406 raise util.Abort('missing support for %s' % exc)
423 for partid, node in part2node:
407 for partid, node in part2node:
424 partrep = op.records.getreplies(partid)
408 partrep = op.records.getreplies(partid)
425 results = partrep['pushkey']
409 results = partrep['pushkey']
426 assert len(results) <= 1
410 assert len(results) <= 1
427 msg = None
411 msg = None
428 if not results:
412 if not results:
429 msg = _('server ignored update of %s to public!\n') % node
413 msg = _('server ignored update of %s to public!\n') % node
430 elif not int(results[0]['return']):
414 elif not int(results[0]['return']):
431 msg = _('updating %s to public failed!\n') % node
415 msg = _('updating %s to public failed!\n') % node
432 if msg is not None:
416 if msg is not None:
433 pushop.ui.warn(msg)
417 pushop.ui.warn(msg)
434
418
435 else:
419 else:
436 # fallback to independant pushkey command
420 # fallback to independant pushkey command
437 for newremotehead in outdated:
421 for newremotehead in outdated:
438 r = pushop.remote.pushkey('phases',
422 r = pushop.remote.pushkey('phases',
439 newremotehead.hex(),
423 newremotehead.hex(),
440 str(phases.draft),
424 str(phases.draft),
441 str(phases.public))
425 str(phases.public))
442 if not r:
426 if not r:
443 pushop.ui.warn(_('updating %s to public failed!\n')
427 pushop.ui.warn(_('updating %s to public failed!\n')
444 % newremotehead)
428 % newremotehead)
445
429
446 def _localphasemove(pushop, nodes, phase=phases.public):
430 def _localphasemove(pushop, nodes, phase=phases.public):
447 """move <nodes> to <phase> in the local source repo"""
431 """move <nodes> to <phase> in the local source repo"""
448 if pushop.locallocked:
432 if pushop.locallocked:
449 phases.advanceboundary(pushop.repo, phase, nodes)
433 phases.advanceboundary(pushop.repo, phase, nodes)
450 else:
434 else:
451 # repo is not locked, do not change any phases!
435 # repo is not locked, do not change any phases!
452 # Informs the user that phases should have been moved when
436 # Informs the user that phases should have been moved when
453 # applicable.
437 # applicable.
454 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
438 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
455 phasestr = phases.phasenames[phase]
439 phasestr = phases.phasenames[phase]
456 if actualmoves:
440 if actualmoves:
457 pushop.ui.status(_('cannot lock source repo, skipping '
441 pushop.ui.status(_('cannot lock source repo, skipping '
458 'local %s phase update\n') % phasestr)
442 'local %s phase update\n') % phasestr)
459
443
460 def _pushobsolete(pushop):
444 def _pushobsolete(pushop):
461 """utility function to push obsolete markers to a remote"""
445 """utility function to push obsolete markers to a remote"""
462 pushop.ui.debug('try to push obsolete markers to remote\n')
446 pushop.ui.debug('try to push obsolete markers to remote\n')
463 repo = pushop.repo
447 repo = pushop.repo
464 remote = pushop.remote
448 remote = pushop.remote
465 if (obsolete._enabled and repo.obsstore and
449 if (obsolete._enabled and repo.obsstore and
466 'obsolete' in remote.listkeys('namespaces')):
450 'obsolete' in remote.listkeys('namespaces')):
467 rslts = []
451 rslts = []
468 remotedata = repo.listkeys('obsolete')
452 remotedata = repo.listkeys('obsolete')
469 for key in sorted(remotedata, reverse=True):
453 for key in sorted(remotedata, reverse=True):
470 # reverse sort to ensure we end with dump0
454 # reverse sort to ensure we end with dump0
471 data = remotedata[key]
455 data = remotedata[key]
472 rslts.append(remote.pushkey('obsolete', key, '', data))
456 rslts.append(remote.pushkey('obsolete', key, '', data))
473 if [r for r in rslts if not r]:
457 if [r for r in rslts if not r]:
474 msg = _('failed to push some obsolete markers!\n')
458 msg = _('failed to push some obsolete markers!\n')
475 repo.ui.warn(msg)
459 repo.ui.warn(msg)
476
460
477 def _pushbookmark(pushop):
461 def _pushbookmark(pushop):
478 """Update bookmark position on remote"""
462 """Update bookmark position on remote"""
479 ui = pushop.ui
463 ui = pushop.ui
480 repo = pushop.repo.unfiltered()
464 repo = pushop.repo.unfiltered()
481 remote = pushop.remote
465 remote = pushop.remote
482 ui.debug("checking for updated bookmarks\n")
466 ui.debug("checking for updated bookmarks\n")
483 revnums = map(repo.changelog.rev, pushop.revs or [])
467 revnums = map(repo.changelog.rev, pushop.revs or [])
484 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
468 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
485 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
469 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
486 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
470 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
487 srchex=hex)
471 srchex=hex)
488
472
489 for b, scid, dcid in advsrc:
473 for b, scid, dcid in advsrc:
490 if ancestors and repo[scid].rev() not in ancestors:
474 if ancestors and repo[scid].rev() not in ancestors:
491 continue
475 continue
492 if remote.pushkey('bookmarks', b, dcid, scid):
476 if remote.pushkey('bookmarks', b, dcid, scid):
493 ui.status(_("updating bookmark %s\n") % b)
477 ui.status(_("updating bookmark %s\n") % b)
494 else:
478 else:
495 ui.warn(_('updating bookmark %s failed!\n') % b)
479 ui.warn(_('updating bookmark %s failed!\n') % b)
496
480
497 class pulloperation(object):
481 class pulloperation(object):
498 """A object that represent a single pull operation
482 """A object that represent a single pull operation
499
483
500 It purpose is to carry push related state and very common operation.
484 It purpose is to carry push related state and very common operation.
501
485
502 A new should be created at the beginning of each pull and discarded
486 A new should be created at the beginning of each pull and discarded
503 afterward.
487 afterward.
504 """
488 """
505
489
506 def __init__(self, repo, remote, heads=None, force=False):
490 def __init__(self, repo, remote, heads=None, force=False):
507 # repo we pull into
491 # repo we pull into
508 self.repo = repo
492 self.repo = repo
509 # repo we pull from
493 # repo we pull from
510 self.remote = remote
494 self.remote = remote
511 # revision we try to pull (None is "all")
495 # revision we try to pull (None is "all")
512 self.heads = heads
496 self.heads = heads
513 # do we force pull?
497 # do we force pull?
514 self.force = force
498 self.force = force
515 # the name the pull transaction
499 # the name the pull transaction
516 self._trname = 'pull\n' + util.hidepassword(remote.url())
500 self._trname = 'pull\n' + util.hidepassword(remote.url())
517 # hold the transaction once created
501 # hold the transaction once created
518 self._tr = None
502 self._tr = None
519 # set of common changeset between local and remote before pull
503 # set of common changeset between local and remote before pull
520 self.common = None
504 self.common = None
521 # set of pulled head
505 # set of pulled head
522 self.rheads = None
506 self.rheads = None
523 # list of missing changeset to fetch remotely
507 # list of missing changeset to fetch remotely
524 self.fetch = None
508 self.fetch = None
525 # result of changegroup pulling (used as return code by pull)
509 # result of changegroup pulling (used as return code by pull)
526 self.cgresult = None
510 self.cgresult = None
527 # list of step remaining todo (related to future bundle2 usage)
511 # list of step remaining todo (related to future bundle2 usage)
528 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
512 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
529
513
530 @util.propertycache
514 @util.propertycache
531 def pulledsubset(self):
515 def pulledsubset(self):
532 """heads of the set of changeset target by the pull"""
516 """heads of the set of changeset target by the pull"""
533 # compute target subset
517 # compute target subset
534 if self.heads is None:
518 if self.heads is None:
535 # We pulled every thing possible
519 # We pulled every thing possible
536 # sync on everything common
520 # sync on everything common
537 c = set(self.common)
521 c = set(self.common)
538 ret = list(self.common)
522 ret = list(self.common)
539 for n in self.rheads:
523 for n in self.rheads:
540 if n not in c:
524 if n not in c:
541 ret.append(n)
525 ret.append(n)
542 return ret
526 return ret
543 else:
527 else:
544 # We pulled a specific subset
528 # We pulled a specific subset
545 # sync on this subset
529 # sync on this subset
546 return self.heads
530 return self.heads
547
531
548 def gettransaction(self):
532 def gettransaction(self):
549 """get appropriate pull transaction, creating it if needed"""
533 """get appropriate pull transaction, creating it if needed"""
550 if self._tr is None:
534 if self._tr is None:
551 self._tr = self.repo.transaction(self._trname)
535 self._tr = self.repo.transaction(self._trname)
552 return self._tr
536 return self._tr
553
537
554 def closetransaction(self):
538 def closetransaction(self):
555 """close transaction if created"""
539 """close transaction if created"""
556 if self._tr is not None:
540 if self._tr is not None:
557 self._tr.close()
541 self._tr.close()
558
542
559 def releasetransaction(self):
543 def releasetransaction(self):
560 """release transaction if created"""
544 """release transaction if created"""
561 if self._tr is not None:
545 if self._tr is not None:
562 self._tr.release()
546 self._tr.release()
563
547
564 def pull(repo, remote, heads=None, force=False):
548 def pull(repo, remote, heads=None, force=False):
565 pullop = pulloperation(repo, remote, heads, force)
549 pullop = pulloperation(repo, remote, heads, force)
566 if pullop.remote.local():
550 if pullop.remote.local():
567 missing = set(pullop.remote.requirements) - pullop.repo.supported
551 missing = set(pullop.remote.requirements) - pullop.repo.supported
568 if missing:
552 if missing:
569 msg = _("required features are not"
553 msg = _("required features are not"
570 " supported in the destination:"
554 " supported in the destination:"
571 " %s") % (', '.join(sorted(missing)))
555 " %s") % (', '.join(sorted(missing)))
572 raise util.Abort(msg)
556 raise util.Abort(msg)
573
557
574 lock = pullop.repo.lock()
558 lock = pullop.repo.lock()
575 try:
559 try:
576 _pulldiscovery(pullop)
560 _pulldiscovery(pullop)
577 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
561 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
578 and pullop.remote.capable('bundle2-exp')):
562 and pullop.remote.capable('bundle2-exp')):
579 _pullbundle2(pullop)
563 _pullbundle2(pullop)
580 if 'changegroup' in pullop.todosteps:
564 if 'changegroup' in pullop.todosteps:
581 _pullchangeset(pullop)
565 _pullchangeset(pullop)
582 if 'phases' in pullop.todosteps:
566 if 'phases' in pullop.todosteps:
583 _pullphase(pullop)
567 _pullphase(pullop)
584 if 'obsmarkers' in pullop.todosteps:
568 if 'obsmarkers' in pullop.todosteps:
585 _pullobsolete(pullop)
569 _pullobsolete(pullop)
586 pullop.closetransaction()
570 pullop.closetransaction()
587 finally:
571 finally:
588 pullop.releasetransaction()
572 pullop.releasetransaction()
589 lock.release()
573 lock.release()
590
574
591 return pullop.cgresult
575 return pullop.cgresult
592
576
593 def _pulldiscovery(pullop):
577 def _pulldiscovery(pullop):
594 """discovery phase for the pull
578 """discovery phase for the pull
595
579
596 Current handle changeset discovery only, will change handle all discovery
580 Current handle changeset discovery only, will change handle all discovery
597 at some point."""
581 at some point."""
598 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
582 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
599 pullop.remote,
583 pullop.remote,
600 heads=pullop.heads,
584 heads=pullop.heads,
601 force=pullop.force)
585 force=pullop.force)
602 pullop.common, pullop.fetch, pullop.rheads = tmp
586 pullop.common, pullop.fetch, pullop.rheads = tmp
603
587
604 def _pullbundle2(pullop):
588 def _pullbundle2(pullop):
605 """pull data using bundle2
589 """pull data using bundle2
606
590
607 For now, the only supported data are changegroup."""
591 For now, the only supported data are changegroup."""
608 remotecaps = bundle2.bundle2caps(pullop.remote)
592 remotecaps = bundle2.bundle2caps(pullop.remote)
609 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
593 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
610 # pulling changegroup
594 # pulling changegroup
611 pullop.todosteps.remove('changegroup')
595 pullop.todosteps.remove('changegroup')
612
596
613 kwargs['common'] = pullop.common
597 kwargs['common'] = pullop.common
614 kwargs['heads'] = pullop.heads or pullop.rheads
598 kwargs['heads'] = pullop.heads or pullop.rheads
615 if 'b2x:listkeys' in remotecaps:
599 if 'b2x:listkeys' in remotecaps:
616 kwargs['listkeys'] = ['phase']
600 kwargs['listkeys'] = ['phase']
617 if not pullop.fetch:
601 if not pullop.fetch:
618 pullop.repo.ui.status(_("no changes found\n"))
602 pullop.repo.ui.status(_("no changes found\n"))
619 pullop.cgresult = 0
603 pullop.cgresult = 0
620 else:
604 else:
621 if pullop.heads is None and list(pullop.common) == [nullid]:
605 if pullop.heads is None and list(pullop.common) == [nullid]:
622 pullop.repo.ui.status(_("requesting all changes\n"))
606 pullop.repo.ui.status(_("requesting all changes\n"))
623 _pullbundle2extraprepare(pullop, kwargs)
607 _pullbundle2extraprepare(pullop, kwargs)
624 if kwargs.keys() == ['format']:
608 if kwargs.keys() == ['format']:
625 return # nothing to pull
609 return # nothing to pull
626 bundle = pullop.remote.getbundle('pull', **kwargs)
610 bundle = pullop.remote.getbundle('pull', **kwargs)
627 try:
611 try:
628 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
612 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
629 except error.BundleValueError, exc:
613 except error.BundleValueError, exc:
630 raise util.Abort('missing support for %s' % exc)
614 raise util.Abort('missing support for %s' % exc)
631
615
632 if pullop.fetch:
616 if pullop.fetch:
633 assert len(op.records['changegroup']) == 1
617 assert len(op.records['changegroup']) == 1
634 pullop.cgresult = op.records['changegroup'][0]['return']
618 pullop.cgresult = op.records['changegroup'][0]['return']
635
619
636 # processing phases change
620 # processing phases change
637 for namespace, value in op.records['listkeys']:
621 for namespace, value in op.records['listkeys']:
638 if namespace == 'phases':
622 if namespace == 'phases':
639 _pullapplyphases(pullop, value)
623 _pullapplyphases(pullop, value)
640
624
641 def _pullbundle2extraprepare(pullop, kwargs):
625 def _pullbundle2extraprepare(pullop, kwargs):
642 """hook function so that extensions can extend the getbundle call"""
626 """hook function so that extensions can extend the getbundle call"""
643 pass
627 pass
644
628
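Because this hook is looked up as a module attribute, extensions customise it by wrapping it rather than by subclassing anything. A hedged sketch using extensions.wrapfunction; the 'myarg' getbundle argument is invented and would need matching server-side support:

from mercurial import exchange, extensions

def _myprepare(orig, pullop, kwargs):
    kwargs['myarg'] = 'value'          # hypothetical extra getbundle argument
    return orig(pullop, kwargs)

def extsetup(ui):
    extensions.wrapfunction(exchange, '_pullbundle2extraprepare', _myprepare)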
645 def _pullchangeset(pullop):
629 def _pullchangeset(pullop):
646 """pull changeset from unbundle into the local repo"""
630 """pull changeset from unbundle into the local repo"""
647 # We delay the open of the transaction as late as possible so we
631 # We delay the open of the transaction as late as possible so we
648 # don't open transaction for nothing or you break future useful
632 # don't open transaction for nothing or you break future useful
649 # rollback call
633 # rollback call
650 pullop.todosteps.remove('changegroup')
634 pullop.todosteps.remove('changegroup')
651 if not pullop.fetch:
635 if not pullop.fetch:
652 pullop.repo.ui.status(_("no changes found\n"))
636 pullop.repo.ui.status(_("no changes found\n"))
653 pullop.cgresult = 0
637 pullop.cgresult = 0
654 return
638 return
655 pullop.gettransaction()
639 pullop.gettransaction()
656 if pullop.heads is None and list(pullop.common) == [nullid]:
640 if pullop.heads is None and list(pullop.common) == [nullid]:
657 pullop.repo.ui.status(_("requesting all changes\n"))
641 pullop.repo.ui.status(_("requesting all changes\n"))
658 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
642 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
659 # issue1320, avoid a race if remote changed after discovery
643 # issue1320, avoid a race if remote changed after discovery
660 pullop.heads = pullop.rheads
644 pullop.heads = pullop.rheads
661
645
662 if pullop.remote.capable('getbundle'):
646 if pullop.remote.capable('getbundle'):
663 # TODO: get bundlecaps from remote
647 # TODO: get bundlecaps from remote
664 cg = pullop.remote.getbundle('pull', common=pullop.common,
648 cg = pullop.remote.getbundle('pull', common=pullop.common,
665 heads=pullop.heads or pullop.rheads)
649 heads=pullop.heads or pullop.rheads)
666 elif pullop.heads is None:
650 elif pullop.heads is None:
667 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
651 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
668 elif not pullop.remote.capable('changegroupsubset'):
652 elif not pullop.remote.capable('changegroupsubset'):
669 raise util.Abort(_("partial pull cannot be done because "
653 raise util.Abort(_("partial pull cannot be done because "
670 "other repository doesn't support "
654 "other repository doesn't support "
671 "changegroupsubset."))
655 "changegroupsubset."))
672 else:
656 else:
673 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
657 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
674 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
658 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
675 pullop.remote.url())
659 pullop.remote.url())
676
660
677 def _pullphase(pullop):
661 def _pullphase(pullop):
678 # Get remote phases data from remote
662 # Get remote phases data from remote
679 remotephases = pullop.remote.listkeys('phases')
663 remotephases = pullop.remote.listkeys('phases')
680 _pullapplyphases(pullop, remotephases)
664 _pullapplyphases(pullop, remotephases)
681
665
682 def _pullapplyphases(pullop, remotephases):
666 def _pullapplyphases(pullop, remotephases):
683 """apply phase movement from observed remote state"""
667 """apply phase movement from observed remote state"""
684 pullop.todosteps.remove('phases')
668 pullop.todosteps.remove('phases')
685 publishing = bool(remotephases.get('publishing', False))
669 publishing = bool(remotephases.get('publishing', False))
686 if remotephases and not publishing:
670 if remotephases and not publishing:
687 # remote is new and unpublishing
671 # remote is new and unpublishing
688 pheads, _dr = phases.analyzeremotephases(pullop.repo,
672 pheads, _dr = phases.analyzeremotephases(pullop.repo,
689 pullop.pulledsubset,
673 pullop.pulledsubset,
690 remotephases)
674 remotephases)
691 phases.advanceboundary(pullop.repo, phases.public, pheads)
675 phases.advanceboundary(pullop.repo, phases.public, pheads)
692 phases.advanceboundary(pullop.repo, phases.draft,
676 phases.advanceboundary(pullop.repo, phases.draft,
693 pullop.pulledsubset)
677 pullop.pulledsubset)
694 else:
678 else:
695 # Remote is old or publishing all common changesets
679 # Remote is old or publishing all common changesets
696 # should be seen as public
680 # should be seen as public
697 phases.advanceboundary(pullop.repo, phases.public,
681 phases.advanceboundary(pullop.repo, phases.public,
698 pullop.pulledsubset)
682 pullop.pulledsubset)
699
683
700 def _pullobsolete(pullop):
684 def _pullobsolete(pullop):
701 """utility function to pull obsolete markers from a remote
685 """utility function to pull obsolete markers from a remote
702
686
703 The `gettransaction` is function that return the pull transaction, creating
687 The `gettransaction` is function that return the pull transaction, creating
704 one if necessary. We return the transaction to inform the calling code that
688 one if necessary. We return the transaction to inform the calling code that
705 a new transaction have been created (when applicable).
689 a new transaction have been created (when applicable).
706
690
707 Exists mostly to allow overriding for experimentation purpose"""
691 Exists mostly to allow overriding for experimentation purpose"""
708 pullop.todosteps.remove('obsmarkers')
692 pullop.todosteps.remove('obsmarkers')
709 tr = None
693 tr = None
710 if obsolete._enabled:
694 if obsolete._enabled:
711 pullop.repo.ui.debug('fetching remote obsolete markers\n')
695 pullop.repo.ui.debug('fetching remote obsolete markers\n')
712 remoteobs = pullop.remote.listkeys('obsolete')
696 remoteobs = pullop.remote.listkeys('obsolete')
713 if 'dump0' in remoteobs:
697 if 'dump0' in remoteobs:
714 tr = pullop.gettransaction()
698 tr = pullop.gettransaction()
715 for key in sorted(remoteobs, reverse=True):
699 for key in sorted(remoteobs, reverse=True):
716 if key.startswith('dump'):
700 if key.startswith('dump'):
717 data = base85.b85decode(remoteobs[key])
701 data = base85.b85decode(remoteobs[key])
718 pullop.repo.obsstore.mergemarkers(tr, data)
702 pullop.repo.obsstore.mergemarkers(tr, data)
719 pullop.repo.invalidatevolatilesets()
703 pullop.repo.invalidatevolatilesets()
720 return tr
704 return tr
721
705
722 def caps20to10(repo):
706 def caps20to10(repo):
723 """return a set with appropriate options to use bundle20 during getbundle"""
707 """return a set with appropriate options to use bundle20 during getbundle"""
724 caps = set(['HG2X'])
708 caps = set(['HG2X'])
725 capsblob = bundle2.encodecaps(repo.bundle2caps)
709 capsblob = bundle2.encodecaps(repo.bundle2caps)
726 caps.add('bundle2=' + urllib.quote(capsblob))
710 caps.add('bundle2=' + urllib.quote(capsblob))
727 return caps
711 return caps
728
712
729 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
713 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
730 **kwargs):
714 **kwargs):
731 """return a full bundle (with potentially multiple kind of parts)
715 """return a full bundle (with potentially multiple kind of parts)
732
716
733 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
717 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
734 passed. For now, the bundle can contain only changegroup, but this will
718 passed. For now, the bundle can contain only changegroup, but this will
735 changes when more part type will be available for bundle2.
719 changes when more part type will be available for bundle2.
736
720
737 This is different from changegroup.getbundle that only returns an HG10
721 This is different from changegroup.getbundle that only returns an HG10
738 changegroup bundle. They may eventually get reunited in the future when we
722 changegroup bundle. They may eventually get reunited in the future when we
739 have a clearer idea of the API we what to query different data.
723 have a clearer idea of the API we what to query different data.
740
724
741 The implementation is at a very early stage and will get massive rework
725 The implementation is at a very early stage and will get massive rework
742 when the API of bundle is refined.
726 when the API of bundle is refined.
743 """
727 """
744 # build changegroup bundle here.
728 # build changegroup bundle here.
745 cg = changegroup.getbundle(repo, source, heads=heads,
729 cg = changegroup.getbundle(repo, source, heads=heads,
746 common=common, bundlecaps=bundlecaps)
730 common=common, bundlecaps=bundlecaps)
747 if bundlecaps is None or 'HG2X' not in bundlecaps:
731 if bundlecaps is None or 'HG2X' not in bundlecaps:
748 if kwargs:
732 if kwargs:
749 raise ValueError(_('unsupported getbundle arguments: %s')
733 raise ValueError(_('unsupported getbundle arguments: %s')
750 % ', '.join(sorted(kwargs.keys())))
734 % ', '.join(sorted(kwargs.keys())))
751 return cg
735 return cg
752 # very crude first implementation,
736 # very crude first implementation,
753 # the bundle API will change and the generation will be done lazily.
737 # the bundle API will change and the generation will be done lazily.
754 b2caps = {}
738 b2caps = {}
755 for bcaps in bundlecaps:
739 for bcaps in bundlecaps:
756 if bcaps.startswith('bundle2='):
740 if bcaps.startswith('bundle2='):
757 blob = urllib.unquote(bcaps[len('bundle2='):])
741 blob = urllib.unquote(bcaps[len('bundle2='):])
758 b2caps.update(bundle2.decodecaps(blob))
742 b2caps.update(bundle2.decodecaps(blob))
759 bundler = bundle2.bundle20(repo.ui, b2caps)
743 bundler = bundle2.bundle20(repo.ui, b2caps)
760 if cg:
744 if cg:
761 bundler.newpart('b2x:changegroup', data=cg.getchunks())
745 bundler.newpart('b2x:changegroup', data=cg.getchunks())
762 listkeys = kwargs.get('listkeys', ())
746 listkeys = kwargs.get('listkeys', ())
763 for namespace in listkeys:
747 for namespace in listkeys:
764 part = bundler.newpart('b2x:listkeys')
748 part = bundler.newpart('b2x:listkeys')
765 part.addparam('namespace', namespace)
749 part.addparam('namespace', namespace)
766 keys = repo.listkeys(namespace).items()
750 keys = repo.listkeys(namespace).items()
767 part.data = pushkey.encodekeys(keys)
751 part.data = pushkey.encodekeys(keys)
768 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
752 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
769 bundlecaps=bundlecaps, **kwargs)
753 bundlecaps=bundlecaps, **kwargs)
770 return util.chunkbuffer(bundler.getchunks())
754 return util.chunkbuffer(bundler.getchunks())
771
755
772 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
756 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
773 bundlecaps=None, **kwargs):
757 bundlecaps=None, **kwargs):
774 """hook function to let extensions add parts to the requested bundle"""
758 """hook function to let extensions add parts to the requested bundle"""
775 pass
759 pass
776
760
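This is the server-side counterpart of the client hooks above: wrapping it lets an extension add parts to the bundle served by getbundle(). A short sketch in the same style (part name and payload invented):

from mercurial import exchange, extensions

def _myextrapart(orig, bundler, repo, source, **kwargs):
    bundler.newpart('b2x:myextension', data=repo.root)
    return orig(bundler, repo, source, **kwargs)

def extsetup(ui):
    extensions.wrapfunction(exchange, '_getbundleextrapart', _myextrapart)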
777 def check_heads(repo, their_heads, context):
761 def check_heads(repo, their_heads, context):
778 """check if the heads of a repo have been modified
762 """check if the heads of a repo have been modified
779
763
780 Used by peer for unbundling.
764 Used by peer for unbundling.
781 """
765 """
782 heads = repo.heads()
766 heads = repo.heads()
783 heads_hash = util.sha1(''.join(sorted(heads))).digest()
767 heads_hash = util.sha1(''.join(sorted(heads))).digest()
784 if not (their_heads == ['force'] or their_heads == heads or
768 if not (their_heads == ['force'] or their_heads == heads or
785 their_heads == ['hashed', heads_hash]):
769 their_heads == ['hashed', heads_hash]):
786 # someone else committed/pushed/unbundled while we
770 # someone else committed/pushed/unbundled while we
787 # were transferring data
771 # were transferring data
788 raise error.PushRaced('repository changed while %s - '
772 raise error.PushRaced('repository changed while %s - '
789 'please try again' % context)
773 'please try again' % context)
790
774
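The ['hashed', digest] form compared above is what wire peers send instead of the full head list (roughly, when the server advertises the unbundlehash capability): the client hashes the sorted binary head nodes it observed, and the server recomputes the digest at unbundle time to detect a race. A sketch of producing the same digest with the standard library (util.sha1 is hashlib.sha1 underneath), assuming `heads` is a list of 20-byte binary node ids:

import hashlib

def hashedheads(heads):
    """mirror of the digest that check_heads() compares against (sketch)"""
    # heads: iterable of 20-byte binary changelog node ids
    return ['hashed', hashlib.sha1(''.join(sorted(heads))).digest()]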
791 def unbundle(repo, cg, heads, source, url):
775 def unbundle(repo, cg, heads, source, url):
792 """Apply a bundle to a repo.
776 """Apply a bundle to a repo.
793
777
794 this function makes sure the repo is locked during the application and have
778 this function makes sure the repo is locked during the application and have
795 mechanism to check that no push race occurred between the creation of the
779 mechanism to check that no push race occurred between the creation of the
796 bundle and its application.
780 bundle and its application.
797
781
798 If the push was raced as PushRaced exception is raised."""
782 If the push was raced as PushRaced exception is raised."""
799 r = 0
783 r = 0
800 # need a transaction when processing a bundle2 stream
784 # need a transaction when processing a bundle2 stream
801 tr = None
785 tr = None
802 lock = repo.lock()
786 lock = repo.lock()
803 try:
787 try:
804 check_heads(repo, heads, 'uploading changes')
788 check_heads(repo, heads, 'uploading changes')
805 # push can proceed
789 # push can proceed
806 if util.safehasattr(cg, 'params'):
790 if util.safehasattr(cg, 'params'):
807 try:
791 try:
808 tr = repo.transaction('unbundle')
792 tr = repo.transaction('unbundle')
809 tr.hookargs['bundle2-exp'] = '1'
793 tr.hookargs['bundle2-exp'] = '1'
810 r = bundle2.processbundle(repo, cg, lambda: tr).reply
794 r = bundle2.processbundle(repo, cg, lambda: tr).reply
811 cl = repo.unfiltered().changelog
795 cl = repo.unfiltered().changelog
812 p = cl.writepending() and repo.root or ""
796 p = cl.writepending() and repo.root or ""
813 repo.hook('b2x-pretransactionclose', throw=True, source=source,
797 repo.hook('b2x-pretransactionclose', throw=True, source=source,
814 url=url, pending=p, **tr.hookargs)
798 url=url, pending=p, **tr.hookargs)
815 tr.close()
799 tr.close()
816 repo.hook('b2x-transactionclose', source=source, url=url,
800 repo.hook('b2x-transactionclose', source=source, url=url,
817 **tr.hookargs)
801 **tr.hookargs)
818 except Exception, exc:
802 except Exception, exc:
819 exc.duringunbundle2 = True
803 exc.duringunbundle2 = True
820 raise
804 raise
821 else:
805 else:
822 r = changegroup.addchangegroup(repo, cg, source, url)
806 r = changegroup.addchangegroup(repo, cg, source, url)
823 finally:
807 finally:
824 if tr is not None:
808 if tr is not None:
825 tr.release()
809 tr.release()
826 lock.release()
810 lock.release()
827 return r
811 return r