##// END OF EJS Templates
bundle2-push: extract changegroup logic in its own function...
Pierre-Yves David -
r21899:52ab44b9 default
parent child Browse files
Show More
@@ -1,792 +1,803 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
14 def readbundle(ui, fh, fname, vfs=None):
14 def readbundle(ui, fh, fname, vfs=None):
15 header = changegroup.readexactly(fh, 4)
15 header = changegroup.readexactly(fh, 4)
16
16
17 alg = None
17 alg = None
18 if not fname:
18 if not fname:
19 fname = "stream"
19 fname = "stream"
20 if not header.startswith('HG') and header.startswith('\0'):
20 if not header.startswith('HG') and header.startswith('\0'):
21 fh = changegroup.headerlessfixup(fh, header)
21 fh = changegroup.headerlessfixup(fh, header)
22 header = "HG10"
22 header = "HG10"
23 alg = 'UN'
23 alg = 'UN'
24 elif vfs:
24 elif vfs:
25 fname = vfs.join(fname)
25 fname = vfs.join(fname)
26
26
27 magic, version = header[0:2], header[2:4]
27 magic, version = header[0:2], header[2:4]
28
28
29 if magic != 'HG':
29 if magic != 'HG':
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 if version == '10':
31 if version == '10':
32 if alg is None:
32 if alg is None:
33 alg = changegroup.readexactly(fh, 2)
33 alg = changegroup.readexactly(fh, 2)
34 return changegroup.unbundle10(fh, alg)
34 return changegroup.unbundle10(fh, alg)
35 elif version == '2X':
35 elif version == '2X':
36 return bundle2.unbundle20(ui, fh, header=magic + version)
36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 else:
37 else:
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
41 class pushoperation(object):
41 class pushoperation(object):
42 """A object that represent a single push operation
42 """A object that represent a single push operation
43
43
44 It purpose is to carry push related state and very common operation.
44 It purpose is to carry push related state and very common operation.
45
45
46 A new should be created at the beginning of each push and discarded
46 A new should be created at the beginning of each push and discarded
47 afterward.
47 afterward.
48 """
48 """
49
49
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 # repo we push from
51 # repo we push from
52 self.repo = repo
52 self.repo = repo
53 self.ui = repo.ui
53 self.ui = repo.ui
54 # repo we push to
54 # repo we push to
55 self.remote = remote
55 self.remote = remote
56 # force option provided
56 # force option provided
57 self.force = force
57 self.force = force
58 # revs to be pushed (None is "all")
58 # revs to be pushed (None is "all")
59 self.revs = revs
59 self.revs = revs
60 # allow push of new branch
60 # allow push of new branch
61 self.newbranch = newbranch
61 self.newbranch = newbranch
62 # did a local lock get acquired?
62 # did a local lock get acquired?
63 self.locallocked = None
63 self.locallocked = None
64 # Integer version of the push result
64 # Integer version of the push result
65 # - None means nothing to push
65 # - None means nothing to push
66 # - 0 means HTTP error
66 # - 0 means HTTP error
67 # - 1 means we pushed and remote head count is unchanged *or*
67 # - 1 means we pushed and remote head count is unchanged *or*
68 # we have outgoing changesets but refused to push
68 # we have outgoing changesets but refused to push
69 # - other values as described by addchangegroup()
69 # - other values as described by addchangegroup()
70 self.ret = None
70 self.ret = None
71 # discover.outgoing object (contains common and outgoing data)
71 # discover.outgoing object (contains common and outgoing data)
72 self.outgoing = None
72 self.outgoing = None
73 # all remote heads before the push
73 # all remote heads before the push
74 self.remoteheads = None
74 self.remoteheads = None
75 # testable as a boolean indicating if any nodes are missing locally.
75 # testable as a boolean indicating if any nodes are missing locally.
76 self.incoming = None
76 self.incoming = None
77 # set of all heads common after changeset bundle push
77 # set of all heads common after changeset bundle push
78 self.commonheads = None
78 self.commonheads = None
79
79
80 def push(repo, remote, force=False, revs=None, newbranch=False):
80 def push(repo, remote, force=False, revs=None, newbranch=False):
81 '''Push outgoing changesets (limited by revs) from a local
81 '''Push outgoing changesets (limited by revs) from a local
82 repository to remote. Return an integer:
82 repository to remote. Return an integer:
83 - None means nothing to push
83 - None means nothing to push
84 - 0 means HTTP error
84 - 0 means HTTP error
85 - 1 means we pushed and remote head count is unchanged *or*
85 - 1 means we pushed and remote head count is unchanged *or*
86 we have outgoing changesets but refused to push
86 we have outgoing changesets but refused to push
87 - other values as described by addchangegroup()
87 - other values as described by addchangegroup()
88 '''
88 '''
89 pushop = pushoperation(repo, remote, force, revs, newbranch)
89 pushop = pushoperation(repo, remote, force, revs, newbranch)
90 if pushop.remote.local():
90 if pushop.remote.local():
91 missing = (set(pushop.repo.requirements)
91 missing = (set(pushop.repo.requirements)
92 - pushop.remote.local().supported)
92 - pushop.remote.local().supported)
93 if missing:
93 if missing:
94 msg = _("required features are not"
94 msg = _("required features are not"
95 " supported in the destination:"
95 " supported in the destination:"
96 " %s") % (', '.join(sorted(missing)))
96 " %s") % (', '.join(sorted(missing)))
97 raise util.Abort(msg)
97 raise util.Abort(msg)
98
98
99 # there are two ways to push to remote repo:
99 # there are two ways to push to remote repo:
100 #
100 #
101 # addchangegroup assumes local user can lock remote
101 # addchangegroup assumes local user can lock remote
102 # repo (local filesystem, old ssh servers).
102 # repo (local filesystem, old ssh servers).
103 #
103 #
104 # unbundle assumes local user cannot lock remote repo (new ssh
104 # unbundle assumes local user cannot lock remote repo (new ssh
105 # servers, http servers).
105 # servers, http servers).
106
106
107 if not pushop.remote.canpush():
107 if not pushop.remote.canpush():
108 raise util.Abort(_("destination does not support push"))
108 raise util.Abort(_("destination does not support push"))
109 # get local lock as we might write phase data
109 # get local lock as we might write phase data
110 locallock = None
110 locallock = None
111 try:
111 try:
112 locallock = pushop.repo.lock()
112 locallock = pushop.repo.lock()
113 pushop.locallocked = True
113 pushop.locallocked = True
114 except IOError, err:
114 except IOError, err:
115 pushop.locallocked = False
115 pushop.locallocked = False
116 if err.errno != errno.EACCES:
116 if err.errno != errno.EACCES:
117 raise
117 raise
118 # source repo cannot be locked.
118 # source repo cannot be locked.
119 # We do not abort the push, but just disable the local phase
119 # We do not abort the push, but just disable the local phase
120 # synchronisation.
120 # synchronisation.
121 msg = 'cannot lock source repository: %s\n' % err
121 msg = 'cannot lock source repository: %s\n' % err
122 pushop.ui.debug(msg)
122 pushop.ui.debug(msg)
123 try:
123 try:
124 pushop.repo.checkpush(pushop)
124 pushop.repo.checkpush(pushop)
125 lock = None
125 lock = None
126 unbundle = pushop.remote.capable('unbundle')
126 unbundle = pushop.remote.capable('unbundle')
127 if not unbundle:
127 if not unbundle:
128 lock = pushop.remote.lock()
128 lock = pushop.remote.lock()
129 try:
129 try:
130 _pushdiscovery(pushop)
130 _pushdiscovery(pushop)
131 if _pushcheckoutgoing(pushop):
131 if _pushcheckoutgoing(pushop):
132 pushop.repo.prepushoutgoinghooks(pushop.repo,
132 pushop.repo.prepushoutgoinghooks(pushop.repo,
133 pushop.remote,
133 pushop.remote,
134 pushop.outgoing)
134 pushop.outgoing)
135 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
135 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
136 False)
136 False)
137 and pushop.remote.capable('bundle2-exp')):
137 and pushop.remote.capable('bundle2-exp')):
138 _pushbundle2(pushop)
138 _pushbundle2(pushop)
139 else:
139 else:
140 _pushchangeset(pushop)
140 _pushchangeset(pushop)
141 _pushcomputecommonheads(pushop)
141 _pushcomputecommonheads(pushop)
142 _pushsyncphase(pushop)
142 _pushsyncphase(pushop)
143 _pushobsolete(pushop)
143 _pushobsolete(pushop)
144 finally:
144 finally:
145 if lock is not None:
145 if lock is not None:
146 lock.release()
146 lock.release()
147 finally:
147 finally:
148 if locallock is not None:
148 if locallock is not None:
149 locallock.release()
149 locallock.release()
150
150
151 _pushbookmark(pushop)
151 _pushbookmark(pushop)
152 return pushop.ret
152 return pushop.ret
153
153
154 def _pushdiscovery(pushop):
154 def _pushdiscovery(pushop):
155 # discovery
155 # discovery
156 unfi = pushop.repo.unfiltered()
156 unfi = pushop.repo.unfiltered()
157 fci = discovery.findcommonincoming
157 fci = discovery.findcommonincoming
158 commoninc = fci(unfi, pushop.remote, force=pushop.force)
158 commoninc = fci(unfi, pushop.remote, force=pushop.force)
159 common, inc, remoteheads = commoninc
159 common, inc, remoteheads = commoninc
160 fco = discovery.findcommonoutgoing
160 fco = discovery.findcommonoutgoing
161 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
161 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
162 commoninc=commoninc, force=pushop.force)
162 commoninc=commoninc, force=pushop.force)
163 pushop.outgoing = outgoing
163 pushop.outgoing = outgoing
164 pushop.remoteheads = remoteheads
164 pushop.remoteheads = remoteheads
165 pushop.incoming = inc
165 pushop.incoming = inc
166
166
167 def _pushcheckoutgoing(pushop):
167 def _pushcheckoutgoing(pushop):
168 outgoing = pushop.outgoing
168 outgoing = pushop.outgoing
169 unfi = pushop.repo.unfiltered()
169 unfi = pushop.repo.unfiltered()
170 if not outgoing.missing:
170 if not outgoing.missing:
171 # nothing to push
171 # nothing to push
172 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
172 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
173 return False
173 return False
174 # something to push
174 # something to push
175 if not pushop.force:
175 if not pushop.force:
176 # if repo.obsstore == False --> no obsolete
176 # if repo.obsstore == False --> no obsolete
177 # then, save the iteration
177 # then, save the iteration
178 if unfi.obsstore:
178 if unfi.obsstore:
179 # this message are here for 80 char limit reason
179 # this message are here for 80 char limit reason
180 mso = _("push includes obsolete changeset: %s!")
180 mso = _("push includes obsolete changeset: %s!")
181 mst = "push includes %s changeset: %s!"
181 mst = "push includes %s changeset: %s!"
182 # plain versions for i18n tool to detect them
182 # plain versions for i18n tool to detect them
183 _("push includes unstable changeset: %s!")
183 _("push includes unstable changeset: %s!")
184 _("push includes bumped changeset: %s!")
184 _("push includes bumped changeset: %s!")
185 _("push includes divergent changeset: %s!")
185 _("push includes divergent changeset: %s!")
186 # If we are to push if there is at least one
186 # If we are to push if there is at least one
187 # obsolete or unstable changeset in missing, at
187 # obsolete or unstable changeset in missing, at
188 # least one of the missinghead will be obsolete or
188 # least one of the missinghead will be obsolete or
189 # unstable. So checking heads only is ok
189 # unstable. So checking heads only is ok
190 for node in outgoing.missingheads:
190 for node in outgoing.missingheads:
191 ctx = unfi[node]
191 ctx = unfi[node]
192 if ctx.obsolete():
192 if ctx.obsolete():
193 raise util.Abort(mso % ctx)
193 raise util.Abort(mso % ctx)
194 elif ctx.troubled():
194 elif ctx.troubled():
195 raise util.Abort(_(mst)
195 raise util.Abort(_(mst)
196 % (ctx.troubles()[0],
196 % (ctx.troubles()[0],
197 ctx))
197 ctx))
198 newbm = pushop.ui.configlist('bookmarks', 'pushing')
198 newbm = pushop.ui.configlist('bookmarks', 'pushing')
199 discovery.checkheads(unfi, pushop.remote, outgoing,
199 discovery.checkheads(unfi, pushop.remote, outgoing,
200 pushop.remoteheads,
200 pushop.remoteheads,
201 pushop.newbranch,
201 pushop.newbranch,
202 bool(pushop.incoming),
202 bool(pushop.incoming),
203 newbm)
203 newbm)
204 return True
204 return True
205
205
206 def _pushb2ctx(pushop, bundler):
207 """handle changegroup push through bundle2
208
209 addchangegroup result is stored in the ``pushop.ret`` attribute.
210 """
211 # Send known heads to the server for race detection.
212 if not pushop.force:
213 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
214 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
215 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
216 def handlereply(op):
217 """extract addchangroup returns from server reply"""
218 cgreplies = op.records.getreplies(cgpart.id)
219 assert len(cgreplies['changegroup']) == 1
220 pushop.ret = cgreplies['changegroup'][0]['return']
221 return handlereply
222
206 def _pushbundle2(pushop):
223 def _pushbundle2(pushop):
207 """push data to the remote using bundle2
224 """push data to the remote using bundle2
208
225
209 The only currently supported type of data is changegroup but this will
226 The only currently supported type of data is changegroup but this will
210 evolve in the future."""
227 evolve in the future."""
211 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
228 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
212 # create reply capability
229 # create reply capability
213 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
230 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
214 bundler.newpart('b2x:replycaps', data=capsblob)
231 bundler.newpart('b2x:replycaps', data=capsblob)
215 extrainfo = _pushbundle2extraparts(pushop, bundler)
232 extrainfo = _pushbundle2extraparts(pushop, bundler)
216 # Send known heads to the server for race detection.
217 if not pushop.force:
218 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
219 # add the changegroup bundle
233 # add the changegroup bundle
220 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
234 cgreplyhandler = _pushb2ctx(pushop, bundler)
221 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
222 stream = util.chunkbuffer(bundler.getchunks())
235 stream = util.chunkbuffer(bundler.getchunks())
223 try:
236 try:
224 reply = pushop.remote.unbundle(stream, ['force'], 'push')
237 reply = pushop.remote.unbundle(stream, ['force'], 'push')
225 except error.BundleValueError, exc:
238 except error.BundleValueError, exc:
226 raise util.Abort('missing support for %s' % exc)
239 raise util.Abort('missing support for %s' % exc)
227 try:
240 try:
228 op = bundle2.processbundle(pushop.repo, reply)
241 op = bundle2.processbundle(pushop.repo, reply)
229 except error.BundleValueError, exc:
242 except error.BundleValueError, exc:
230 raise util.Abort('missing support for %s' % exc)
243 raise util.Abort('missing support for %s' % exc)
231 cgreplies = op.records.getreplies(cgpart.id)
244 cgreplyhandler(op)
232 assert len(cgreplies['changegroup']) == 1
233 pushop.ret = cgreplies['changegroup'][0]['return']
234 _pushbundle2extrareply(pushop, op, extrainfo)
245 _pushbundle2extrareply(pushop, op, extrainfo)
235
246
236 def _pushbundle2extraparts(pushop, bundler):
247 def _pushbundle2extraparts(pushop, bundler):
237 """hook function to let extensions add parts
248 """hook function to let extensions add parts
238
249
239 Return a dict to let extensions pass data to the reply processing.
250 Return a dict to let extensions pass data to the reply processing.
240 """
251 """
241 return {}
252 return {}
242
253
243 def _pushbundle2extrareply(pushop, op, extrainfo):
254 def _pushbundle2extrareply(pushop, op, extrainfo):
244 """hook function to let extensions react to part replies
255 """hook function to let extensions react to part replies
245
256
246 The dict from _pushbundle2extrareply is fed to this function.
257 The dict from _pushbundle2extrareply is fed to this function.
247 """
258 """
248 pass
259 pass
249
260
250 def _pushchangeset(pushop):
261 def _pushchangeset(pushop):
251 """Make the actual push of changeset bundle to remote repo"""
262 """Make the actual push of changeset bundle to remote repo"""
252 outgoing = pushop.outgoing
263 outgoing = pushop.outgoing
253 unbundle = pushop.remote.capable('unbundle')
264 unbundle = pushop.remote.capable('unbundle')
254 # TODO: get bundlecaps from remote
265 # TODO: get bundlecaps from remote
255 bundlecaps = None
266 bundlecaps = None
256 # create a changegroup from local
267 # create a changegroup from local
257 if pushop.revs is None and not (outgoing.excluded
268 if pushop.revs is None and not (outgoing.excluded
258 or pushop.repo.changelog.filteredrevs):
269 or pushop.repo.changelog.filteredrevs):
259 # push everything,
270 # push everything,
260 # use the fast path, no race possible on push
271 # use the fast path, no race possible on push
261 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
272 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
262 cg = changegroup.getsubset(pushop.repo,
273 cg = changegroup.getsubset(pushop.repo,
263 outgoing,
274 outgoing,
264 bundler,
275 bundler,
265 'push',
276 'push',
266 fastpath=True)
277 fastpath=True)
267 else:
278 else:
268 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
279 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
269 bundlecaps)
280 bundlecaps)
270
281
271 # apply changegroup to remote
282 # apply changegroup to remote
272 if unbundle:
283 if unbundle:
273 # local repo finds heads on server, finds out what
284 # local repo finds heads on server, finds out what
274 # revs it must push. once revs transferred, if server
285 # revs it must push. once revs transferred, if server
275 # finds it has different heads (someone else won
286 # finds it has different heads (someone else won
276 # commit/push race), server aborts.
287 # commit/push race), server aborts.
277 if pushop.force:
288 if pushop.force:
278 remoteheads = ['force']
289 remoteheads = ['force']
279 else:
290 else:
280 remoteheads = pushop.remoteheads
291 remoteheads = pushop.remoteheads
281 # ssh: return remote's addchangegroup()
292 # ssh: return remote's addchangegroup()
282 # http: return remote's addchangegroup() or 0 for error
293 # http: return remote's addchangegroup() or 0 for error
283 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
294 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
284 pushop.repo.url())
295 pushop.repo.url())
285 else:
296 else:
286 # we return an integer indicating remote head count
297 # we return an integer indicating remote head count
287 # change
298 # change
288 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
299 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
289
300
290 def _pushcomputecommonheads(pushop):
301 def _pushcomputecommonheads(pushop):
291 unfi = pushop.repo.unfiltered()
302 unfi = pushop.repo.unfiltered()
292 if pushop.ret:
303 if pushop.ret:
293 # push succeed, synchronize target of the push
304 # push succeed, synchronize target of the push
294 cheads = pushop.outgoing.missingheads
305 cheads = pushop.outgoing.missingheads
295 elif pushop.revs is None:
306 elif pushop.revs is None:
296 # All out push fails. synchronize all common
307 # All out push fails. synchronize all common
297 cheads = pushop.outgoing.commonheads
308 cheads = pushop.outgoing.commonheads
298 else:
309 else:
299 # I want cheads = heads(::missingheads and ::commonheads)
310 # I want cheads = heads(::missingheads and ::commonheads)
300 # (missingheads is revs with secret changeset filtered out)
311 # (missingheads is revs with secret changeset filtered out)
301 #
312 #
302 # This can be expressed as:
313 # This can be expressed as:
303 # cheads = ( (missingheads and ::commonheads)
314 # cheads = ( (missingheads and ::commonheads)
304 # + (commonheads and ::missingheads))"
315 # + (commonheads and ::missingheads))"
305 # )
316 # )
306 #
317 #
307 # while trying to push we already computed the following:
318 # while trying to push we already computed the following:
308 # common = (::commonheads)
319 # common = (::commonheads)
309 # missing = ((commonheads::missingheads) - commonheads)
320 # missing = ((commonheads::missingheads) - commonheads)
310 #
321 #
311 # We can pick:
322 # We can pick:
312 # * missingheads part of common (::commonheads)
323 # * missingheads part of common (::commonheads)
313 common = set(pushop.outgoing.common)
324 common = set(pushop.outgoing.common)
314 nm = pushop.repo.changelog.nodemap
325 nm = pushop.repo.changelog.nodemap
315 cheads = [node for node in pushop.revs if nm[node] in common]
326 cheads = [node for node in pushop.revs if nm[node] in common]
316 # and
327 # and
317 # * commonheads parents on missing
328 # * commonheads parents on missing
318 revset = unfi.set('%ln and parents(roots(%ln))',
329 revset = unfi.set('%ln and parents(roots(%ln))',
319 pushop.outgoing.commonheads,
330 pushop.outgoing.commonheads,
320 pushop.outgoing.missing)
331 pushop.outgoing.missing)
321 cheads.extend(c.node() for c in revset)
332 cheads.extend(c.node() for c in revset)
322 pushop.commonheads = cheads
333 pushop.commonheads = cheads
323
334
324 def _pushsyncphase(pushop):
335 def _pushsyncphase(pushop):
325 """synchronise phase information locally and remotely"""
336 """synchronise phase information locally and remotely"""
326 unfi = pushop.repo.unfiltered()
337 unfi = pushop.repo.unfiltered()
327 cheads = pushop.commonheads
338 cheads = pushop.commonheads
328 # even when we don't push, exchanging phase data is useful
339 # even when we don't push, exchanging phase data is useful
329 remotephases = pushop.remote.listkeys('phases')
340 remotephases = pushop.remote.listkeys('phases')
330 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
341 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
331 and remotephases # server supports phases
342 and remotephases # server supports phases
332 and pushop.ret is None # nothing was pushed
343 and pushop.ret is None # nothing was pushed
333 and remotephases.get('publishing', False)):
344 and remotephases.get('publishing', False)):
334 # When:
345 # When:
335 # - this is a subrepo push
346 # - this is a subrepo push
336 # - and remote support phase
347 # - and remote support phase
337 # - and no changeset was pushed
348 # - and no changeset was pushed
338 # - and remote is publishing
349 # - and remote is publishing
339 # We may be in issue 3871 case!
350 # We may be in issue 3871 case!
340 # We drop the possible phase synchronisation done by
351 # We drop the possible phase synchronisation done by
341 # courtesy to publish changesets possibly locally draft
352 # courtesy to publish changesets possibly locally draft
342 # on the remote.
353 # on the remote.
343 remotephases = {'publishing': 'True'}
354 remotephases = {'publishing': 'True'}
344 if not remotephases: # old server or public only reply from non-publishing
355 if not remotephases: # old server or public only reply from non-publishing
345 _localphasemove(pushop, cheads)
356 _localphasemove(pushop, cheads)
346 # don't push any phase data as there is nothing to push
357 # don't push any phase data as there is nothing to push
347 else:
358 else:
348 ana = phases.analyzeremotephases(pushop.repo, cheads,
359 ana = phases.analyzeremotephases(pushop.repo, cheads,
349 remotephases)
360 remotephases)
350 pheads, droots = ana
361 pheads, droots = ana
351 ### Apply remote phase on local
362 ### Apply remote phase on local
352 if remotephases.get('publishing', False):
363 if remotephases.get('publishing', False):
353 _localphasemove(pushop, cheads)
364 _localphasemove(pushop, cheads)
354 else: # publish = False
365 else: # publish = False
355 _localphasemove(pushop, pheads)
366 _localphasemove(pushop, pheads)
356 _localphasemove(pushop, cheads, phases.draft)
367 _localphasemove(pushop, cheads, phases.draft)
357 ### Apply local phase on remote
368 ### Apply local phase on remote
358
369
359 # Get the list of all revs draft on remote by public here.
370 # Get the list of all revs draft on remote by public here.
360 # XXX Beware that revset break if droots is not strictly
371 # XXX Beware that revset break if droots is not strictly
361 # XXX root we may want to ensure it is but it is costly
372 # XXX root we may want to ensure it is but it is costly
362 outdated = unfi.set('heads((%ln::%ln) and public())',
373 outdated = unfi.set('heads((%ln::%ln) and public())',
363 droots, cheads)
374 droots, cheads)
364
375
365 b2caps = bundle2.bundle2caps(pushop.remote)
376 b2caps = bundle2.bundle2caps(pushop.remote)
366 if 'b2x:pushkey' in b2caps:
377 if 'b2x:pushkey' in b2caps:
367 # server supports bundle2, let's do a batched push through it
378 # server supports bundle2, let's do a batched push through it
368 #
379 #
369 # This will eventually be unified with the changesets bundle2 push
380 # This will eventually be unified with the changesets bundle2 push
370 bundler = bundle2.bundle20(pushop.ui, b2caps)
381 bundler = bundle2.bundle20(pushop.ui, b2caps)
371 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
382 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
372 bundler.newpart('b2x:replycaps', data=capsblob)
383 bundler.newpart('b2x:replycaps', data=capsblob)
373 part2node = []
384 part2node = []
374 enc = pushkey.encode
385 enc = pushkey.encode
375 for newremotehead in outdated:
386 for newremotehead in outdated:
376 part = bundler.newpart('b2x:pushkey')
387 part = bundler.newpart('b2x:pushkey')
377 part.addparam('namespace', enc('phases'))
388 part.addparam('namespace', enc('phases'))
378 part.addparam('key', enc(newremotehead.hex()))
389 part.addparam('key', enc(newremotehead.hex()))
379 part.addparam('old', enc(str(phases.draft)))
390 part.addparam('old', enc(str(phases.draft)))
380 part.addparam('new', enc(str(phases.public)))
391 part.addparam('new', enc(str(phases.public)))
381 part2node.append((part.id, newremotehead))
392 part2node.append((part.id, newremotehead))
382 stream = util.chunkbuffer(bundler.getchunks())
393 stream = util.chunkbuffer(bundler.getchunks())
383 try:
394 try:
384 reply = pushop.remote.unbundle(stream, ['force'], 'push')
395 reply = pushop.remote.unbundle(stream, ['force'], 'push')
385 op = bundle2.processbundle(pushop.repo, reply)
396 op = bundle2.processbundle(pushop.repo, reply)
386 except error.BundleValueError, exc:
397 except error.BundleValueError, exc:
387 raise util.Abort('missing support for %s' % exc)
398 raise util.Abort('missing support for %s' % exc)
388 for partid, node in part2node:
399 for partid, node in part2node:
389 partrep = op.records.getreplies(partid)
400 partrep = op.records.getreplies(partid)
390 results = partrep['pushkey']
401 results = partrep['pushkey']
391 assert len(results) <= 1
402 assert len(results) <= 1
392 msg = None
403 msg = None
393 if not results:
404 if not results:
394 msg = _('server ignored update of %s to public!\n') % node
405 msg = _('server ignored update of %s to public!\n') % node
395 elif not int(results[0]['return']):
406 elif not int(results[0]['return']):
396 msg = _('updating %s to public failed!\n') % node
407 msg = _('updating %s to public failed!\n') % node
397 if msg is not None:
408 if msg is not None:
398 pushop.ui.warn(msg)
409 pushop.ui.warn(msg)
399
410
400 else:
411 else:
401 # fallback to independant pushkey command
412 # fallback to independant pushkey command
402 for newremotehead in outdated:
413 for newremotehead in outdated:
403 r = pushop.remote.pushkey('phases',
414 r = pushop.remote.pushkey('phases',
404 newremotehead.hex(),
415 newremotehead.hex(),
405 str(phases.draft),
416 str(phases.draft),
406 str(phases.public))
417 str(phases.public))
407 if not r:
418 if not r:
408 pushop.ui.warn(_('updating %s to public failed!\n')
419 pushop.ui.warn(_('updating %s to public failed!\n')
409 % newremotehead)
420 % newremotehead)
410
421
411 def _localphasemove(pushop, nodes, phase=phases.public):
422 def _localphasemove(pushop, nodes, phase=phases.public):
412 """move <nodes> to <phase> in the local source repo"""
423 """move <nodes> to <phase> in the local source repo"""
413 if pushop.locallocked:
424 if pushop.locallocked:
414 phases.advanceboundary(pushop.repo, phase, nodes)
425 phases.advanceboundary(pushop.repo, phase, nodes)
415 else:
426 else:
416 # repo is not locked, do not change any phases!
427 # repo is not locked, do not change any phases!
417 # Informs the user that phases should have been moved when
428 # Informs the user that phases should have been moved when
418 # applicable.
429 # applicable.
419 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
430 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
420 phasestr = phases.phasenames[phase]
431 phasestr = phases.phasenames[phase]
421 if actualmoves:
432 if actualmoves:
422 pushop.ui.status(_('cannot lock source repo, skipping '
433 pushop.ui.status(_('cannot lock source repo, skipping '
423 'local %s phase update\n') % phasestr)
434 'local %s phase update\n') % phasestr)
424
435
425 def _pushobsolete(pushop):
436 def _pushobsolete(pushop):
426 """utility function to push obsolete markers to a remote"""
437 """utility function to push obsolete markers to a remote"""
427 pushop.ui.debug('try to push obsolete markers to remote\n')
438 pushop.ui.debug('try to push obsolete markers to remote\n')
428 repo = pushop.repo
439 repo = pushop.repo
429 remote = pushop.remote
440 remote = pushop.remote
430 if (obsolete._enabled and repo.obsstore and
441 if (obsolete._enabled and repo.obsstore and
431 'obsolete' in remote.listkeys('namespaces')):
442 'obsolete' in remote.listkeys('namespaces')):
432 rslts = []
443 rslts = []
433 remotedata = repo.listkeys('obsolete')
444 remotedata = repo.listkeys('obsolete')
434 for key in sorted(remotedata, reverse=True):
445 for key in sorted(remotedata, reverse=True):
435 # reverse sort to ensure we end with dump0
446 # reverse sort to ensure we end with dump0
436 data = remotedata[key]
447 data = remotedata[key]
437 rslts.append(remote.pushkey('obsolete', key, '', data))
448 rslts.append(remote.pushkey('obsolete', key, '', data))
438 if [r for r in rslts if not r]:
449 if [r for r in rslts if not r]:
439 msg = _('failed to push some obsolete markers!\n')
450 msg = _('failed to push some obsolete markers!\n')
440 repo.ui.warn(msg)
451 repo.ui.warn(msg)
441
452
def _pushbookmark(pushop):
    """Update bookmark position on remote.

    Only bookmarks pointing into the pushed subset (when specific
    revisions are pushed) are advanced on the remote side.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    # use a set: membership is tested once per advanced bookmark below
    ancestors = set(repo.changelog.ancestors(revnums, inclusive=True))
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)
    for b, scid, dcid in advsrc:
        # skip bookmarks whose target is outside the pushed subset
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)
461
472
class pulloperation(object):
    """State holder for a single pull operation.

    Its purpose is to carry pull related state and very common operations.

    A new object should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step remaining todo (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset targeted by the pull"""
        if self.heads is not None:
            # We pulled a specific subset: sync on this subset
            return self.heads
        # We pulled everything possible: sync on everything common
        # plus the remote heads not already in common
        known = set(self.common)
        return list(self.common) + [n for n in self.rheads if n not in known]

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
528
539
def pull(repo, remote, heads=None, force=False):
    """Pull changes from ``remote`` into ``repo``.

    Returns the changegroup result code (``pulloperation.cgresult``).
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # refuse to pull from a local repo whose requirements we do not
        # support
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        ui = pullop.repo.ui
        if (ui.configbool('experimental', 'bundle2-exp', False)
                and pullop.remote.capable('bundle2-exp')):
            # bundle2 handles several steps at once and removes the ones it
            # performed from ``todosteps``
            _pullbundle2(pullop)
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
557
568
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will eventually handle
    all discovery steps."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(),
        pullop.remote,
        heads=pullop.heads,
        force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
568
579
569 def _pullbundle2(pullop):
580 def _pullbundle2(pullop):
570 """pull data using bundle2
581 """pull data using bundle2
571
582
572 For now, the only supported data are changegroup."""
583 For now, the only supported data are changegroup."""
573 remotecaps = bundle2.bundle2caps(pullop.remote)
584 remotecaps = bundle2.bundle2caps(pullop.remote)
574 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
585 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
575 # pulling changegroup
586 # pulling changegroup
576 pullop.todosteps.remove('changegroup')
587 pullop.todosteps.remove('changegroup')
577
588
578 kwargs['common'] = pullop.common
589 kwargs['common'] = pullop.common
579 kwargs['heads'] = pullop.heads or pullop.rheads
590 kwargs['heads'] = pullop.heads or pullop.rheads
580 if 'b2x:listkeys' in remotecaps:
591 if 'b2x:listkeys' in remotecaps:
581 kwargs['listkeys'] = ['phase']
592 kwargs['listkeys'] = ['phase']
582 if not pullop.fetch:
593 if not pullop.fetch:
583 pullop.repo.ui.status(_("no changes found\n"))
594 pullop.repo.ui.status(_("no changes found\n"))
584 pullop.cgresult = 0
595 pullop.cgresult = 0
585 else:
596 else:
586 if pullop.heads is None and list(pullop.common) == [nullid]:
597 if pullop.heads is None and list(pullop.common) == [nullid]:
587 pullop.repo.ui.status(_("requesting all changes\n"))
598 pullop.repo.ui.status(_("requesting all changes\n"))
588 _pullbundle2extraprepare(pullop, kwargs)
599 _pullbundle2extraprepare(pullop, kwargs)
589 if kwargs.keys() == ['format']:
600 if kwargs.keys() == ['format']:
590 return # nothing to pull
601 return # nothing to pull
591 bundle = pullop.remote.getbundle('pull', **kwargs)
602 bundle = pullop.remote.getbundle('pull', **kwargs)
592 try:
603 try:
593 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
604 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
594 except error.BundleValueError, exc:
605 except error.BundleValueError, exc:
595 raise util.Abort('missing support for %s' % exc)
606 raise util.Abort('missing support for %s' % exc)
596
607
597 if pullop.fetch:
608 if pullop.fetch:
598 assert len(op.records['changegroup']) == 1
609 assert len(op.records['changegroup']) == 1
599 pullop.cgresult = op.records['changegroup'][0]['return']
610 pullop.cgresult = op.records['changegroup'][0]['return']
600
611
601 # processing phases change
612 # processing phases change
602 for namespace, value in op.records['listkeys']:
613 for namespace, value in op.records['listkeys']:
603 if namespace == 'phases':
614 if namespace == 'phases':
604 _pullapplyphases(pullop, value)
615 _pullapplyphases(pullop, value)
605
616
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    ``kwargs`` is the argument dict later passed to ``remote.getbundle``;
    extensions may mutate it in place."""
    pass
609
620
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay opening the transaction as late as possible so we don't
    # open a transaction for nothing and don't break future useful
    # rollback calls
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    remote = pullop.remote
    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = remote.getbundle('pull', common=pullop.common,
                              heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 remote.url())
641
652
def _pullphase(pullop):
    """Fetch phase information from the remote and apply it locally."""
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
646
657
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    pullop.todosteps.remove('phases')
    publishing = bool(remotephases.get('publishing', False))
    if publishing or not remotephases:
        # Remote is old or publishing: all common changesets should be
        # seen as public
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)
    else:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        phases.advanceboundary(pullop.repo, phases.public, pheads)
        phases.advanceboundary(pullop.repo, phases.draft,
                               pullop.pulledsubset)
664
675
def _pullobsolete(pullop):
    """Pull obsolete markers from the remote, when applicable.

    Returns the pull transaction when one was created (i.e. when markers
    were actually fetched), ``None`` otherwise, to inform the calling code
    that a new transaction has been created.

    Exists mostly to allow overriding for experimentation purpose."""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            # reverse sort to ensure we end with dump0
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    pullop.repo.obsstore.mergemarkers(
                        tr, base85.b85decode(remoteobs[key]))
            pullop.repo.invalidatevolatilesets()
    return tr
686
697
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(repo.bundle2caps)
    return set(['HG2X', 'bundle2=' + urllib.quote(capsblob)])
693
704
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this
    will change when more part types become available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when
    we have a clearer idea of the API we want to use to query different
    data.

    The implementation is at a very early stage and will get massive
    rework when the bundle API is refined.
    """
    # build changegroup bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # plain HG10 requested: no extra arguments may be honored
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
736
747
def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
                        bundlecaps=None, **kwargs):
    """hook function to let extensions add parts to the requested bundle

    ``bundler`` is the bundle2.bundle20 being assembled by ``getbundle``."""
    pass
741
752
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    unchanged = (their_heads == ['force'] or their_heads == heads or
                 their_heads == ['hashed', heads_hash])
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
755
766
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # a 'params' attribute marks a bundle2 stream
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                cl = repo.unfiltered().changelog
                # expose pending changes to the pre-close hook
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # tag the exception so callers know it happened during
                # bundle2 processing
                exc.duringunbundle2 = True
                raise
        else:
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now