##// END OF EJS Templates
push: use `stepsdone` to control changegroup push through bundle10 or bundle20...
Pierre-Yves David -
r21902:7a7def85 default
parent child Browse files
Show More
@@ -1,806 +1,811 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Read a bundle header from ``fh`` and return the matching unbundler.

    ``fname`` is used only for error messages; when empty, the data is
    treated as an anonymous stream.  ``vfs``, if given, resolves ``fname``
    to a full path for nicer diagnostics.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream starting with NUL is assumed to be an
        # uncompressed HG10 bundle; re-inject the bytes we already read.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # compression algorithm is the next two bytes of the header
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    elif version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    else:
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None
82
82
83 def push(repo, remote, force=False, revs=None, newbranch=False):
83 def push(repo, remote, force=False, revs=None, newbranch=False):
84 '''Push outgoing changesets (limited by revs) from a local
84 '''Push outgoing changesets (limited by revs) from a local
85 repository to remote. Return an integer:
85 repository to remote. Return an integer:
86 - None means nothing to push
86 - None means nothing to push
87 - 0 means HTTP error
87 - 0 means HTTP error
88 - 1 means we pushed and remote head count is unchanged *or*
88 - 1 means we pushed and remote head count is unchanged *or*
89 we have outgoing changesets but refused to push
89 we have outgoing changesets but refused to push
90 - other values as described by addchangegroup()
90 - other values as described by addchangegroup()
91 '''
91 '''
92 pushop = pushoperation(repo, remote, force, revs, newbranch)
92 pushop = pushoperation(repo, remote, force, revs, newbranch)
93 if pushop.remote.local():
93 if pushop.remote.local():
94 missing = (set(pushop.repo.requirements)
94 missing = (set(pushop.repo.requirements)
95 - pushop.remote.local().supported)
95 - pushop.remote.local().supported)
96 if missing:
96 if missing:
97 msg = _("required features are not"
97 msg = _("required features are not"
98 " supported in the destination:"
98 " supported in the destination:"
99 " %s") % (', '.join(sorted(missing)))
99 " %s") % (', '.join(sorted(missing)))
100 raise util.Abort(msg)
100 raise util.Abort(msg)
101
101
102 # there are two ways to push to remote repo:
102 # there are two ways to push to remote repo:
103 #
103 #
104 # addchangegroup assumes local user can lock remote
104 # addchangegroup assumes local user can lock remote
105 # repo (local filesystem, old ssh servers).
105 # repo (local filesystem, old ssh servers).
106 #
106 #
107 # unbundle assumes local user cannot lock remote repo (new ssh
107 # unbundle assumes local user cannot lock remote repo (new ssh
108 # servers, http servers).
108 # servers, http servers).
109
109
110 if not pushop.remote.canpush():
110 if not pushop.remote.canpush():
111 raise util.Abort(_("destination does not support push"))
111 raise util.Abort(_("destination does not support push"))
112 # get local lock as we might write phase data
112 # get local lock as we might write phase data
113 locallock = None
113 locallock = None
114 try:
114 try:
115 locallock = pushop.repo.lock()
115 locallock = pushop.repo.lock()
116 pushop.locallocked = True
116 pushop.locallocked = True
117 except IOError, err:
117 except IOError, err:
118 pushop.locallocked = False
118 pushop.locallocked = False
119 if err.errno != errno.EACCES:
119 if err.errno != errno.EACCES:
120 raise
120 raise
121 # source repo cannot be locked.
121 # source repo cannot be locked.
122 # We do not abort the push, but just disable the local phase
122 # We do not abort the push, but just disable the local phase
123 # synchronisation.
123 # synchronisation.
124 msg = 'cannot lock source repository: %s\n' % err
124 msg = 'cannot lock source repository: %s\n' % err
125 pushop.ui.debug(msg)
125 pushop.ui.debug(msg)
126 try:
126 try:
127 pushop.repo.checkpush(pushop)
127 pushop.repo.checkpush(pushop)
128 lock = None
128 lock = None
129 unbundle = pushop.remote.capable('unbundle')
129 unbundle = pushop.remote.capable('unbundle')
130 if not unbundle:
130 if not unbundle:
131 lock = pushop.remote.lock()
131 lock = pushop.remote.lock()
132 try:
132 try:
133 _pushdiscovery(pushop)
133 _pushdiscovery(pushop)
134 if _pushcheckoutgoing(pushop):
134 if _pushcheckoutgoing(pushop):
135 pushop.repo.prepushoutgoinghooks(pushop.repo,
135 pushop.repo.prepushoutgoinghooks(pushop.repo,
136 pushop.remote,
136 pushop.remote,
137 pushop.outgoing)
137 pushop.outgoing)
138 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
138 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
139 False)
139 False)
140 and pushop.remote.capable('bundle2-exp')):
140 and pushop.remote.capable('bundle2-exp')):
141 _pushbundle2(pushop)
141 _pushbundle2(pushop)
142 else:
142 _pushchangeset(pushop)
143 _pushchangeset(pushop)
144 _pushcomputecommonheads(pushop)
143 _pushcomputecommonheads(pushop)
145 _pushsyncphase(pushop)
144 _pushsyncphase(pushop)
146 _pushobsolete(pushop)
145 _pushobsolete(pushop)
147 finally:
146 finally:
148 if lock is not None:
147 if lock is not None:
149 lock.release()
148 lock.release()
150 finally:
149 finally:
151 if locallock is not None:
150 if locallock is not None:
152 locallock.release()
151 locallock.release()
153
152
154 _pushbookmark(pushop)
153 _pushbookmark(pushop)
155 return pushop.ret
154 return pushop.ret
156
155
def _pushdiscovery(pushop):
    """Run common/outgoing discovery and record the results on ``pushop``.

    Fills in ``pushop.outgoing``, ``pushop.remoteheads`` and
    ``pushop.incoming``.
    """
    unfi = pushop.repo.unfiltered()
    fci = discovery.findcommonincoming
    commoninc = fci(unfi, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
169
168
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set before pushing.

    Returns False when there is nothing to push.  Unless ``--force`` was
    given, aborts on obsolete/troubled outgoing heads and runs the usual
    new-remote-heads check.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
208
207
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.ret`` attribute.
    Marks the 'changesets' step done so the bundle10 fallback in
    _pushchangeset is skipped.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not pushop.force:
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.ret = cgreplies['changegroup'][0]['return']
    return handlereply
225
227
226 def _pushbundle2(pushop):
228 def _pushbundle2(pushop):
227 """push data to the remote using bundle2
229 """push data to the remote using bundle2
228
230
229 The only currently supported type of data is changegroup but this will
231 The only currently supported type of data is changegroup but this will
230 evolve in the future."""
232 evolve in the future."""
231 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
233 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
232 # create reply capability
234 # create reply capability
233 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
235 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
234 bundler.newpart('b2x:replycaps', data=capsblob)
236 bundler.newpart('b2x:replycaps', data=capsblob)
235 extrainfo = _pushbundle2extraparts(pushop, bundler)
237 extrainfo = _pushbundle2extraparts(pushop, bundler)
236 # add the changegroup bundle
238 # add the changegroup bundle
237 cgreplyhandler = _pushb2ctx(pushop, bundler)
239 cgreplyhandler = _pushb2ctx(pushop, bundler)
238 stream = util.chunkbuffer(bundler.getchunks())
240 stream = util.chunkbuffer(bundler.getchunks())
239 try:
241 try:
240 reply = pushop.remote.unbundle(stream, ['force'], 'push')
242 reply = pushop.remote.unbundle(stream, ['force'], 'push')
241 except error.BundleValueError, exc:
243 except error.BundleValueError, exc:
242 raise util.Abort('missing support for %s' % exc)
244 raise util.Abort('missing support for %s' % exc)
243 try:
245 try:
244 op = bundle2.processbundle(pushop.repo, reply)
246 op = bundle2.processbundle(pushop.repo, reply)
245 except error.BundleValueError, exc:
247 except error.BundleValueError, exc:
246 raise util.Abort('missing support for %s' % exc)
248 raise util.Abort('missing support for %s' % exc)
247 cgreplyhandler(op)
249 cgreplyhandler(op)
248 _pushbundle2extrareply(pushop, op, extrainfo)
250 _pushbundle2extrareply(pushop, op, extrainfo)
249
251
250 def _pushbundle2extraparts(pushop, bundler):
252 def _pushbundle2extraparts(pushop, bundler):
251 """hook function to let extensions add parts
253 """hook function to let extensions add parts
252
254
253 Return a dict to let extensions pass data to the reply processing.
255 Return a dict to let extensions pass data to the reply processing.
254 """
256 """
255 return {}
257 return {}
256
258
257 def _pushbundle2extrareply(pushop, op, extrainfo):
259 def _pushbundle2extrareply(pushop, op, extrainfo):
258 """hook function to let extensions react to part replies
260 """hook function to let extensions react to part replies
259
261
260 The dict from _pushbundle2extrareply is fed to this function.
262 The dict from _pushbundle2extrareply is fed to this function.
261 """
263 """
262 pass
264 pass
263
265
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    No-op when the 'changesets' step was already handled (e.g. by the
    bundle2 code path).
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
303
308
def _pushcomputecommonheads(pushop):
    """Compute the set of heads common to both sides after the push.

    Stores the result in ``pushop.commonheads`` for later phase sync.
    """
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeed, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All out push fails. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
337
342
338 def _pushsyncphase(pushop):
343 def _pushsyncphase(pushop):
339 """synchronise phase information locally and remotely"""
344 """synchronise phase information locally and remotely"""
340 unfi = pushop.repo.unfiltered()
345 unfi = pushop.repo.unfiltered()
341 cheads = pushop.commonheads
346 cheads = pushop.commonheads
342 # even when we don't push, exchanging phase data is useful
347 # even when we don't push, exchanging phase data is useful
343 remotephases = pushop.remote.listkeys('phases')
348 remotephases = pushop.remote.listkeys('phases')
344 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
349 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
345 and remotephases # server supports phases
350 and remotephases # server supports phases
346 and pushop.ret is None # nothing was pushed
351 and pushop.ret is None # nothing was pushed
347 and remotephases.get('publishing', False)):
352 and remotephases.get('publishing', False)):
348 # When:
353 # When:
349 # - this is a subrepo push
354 # - this is a subrepo push
350 # - and remote support phase
355 # - and remote support phase
351 # - and no changeset was pushed
356 # - and no changeset was pushed
352 # - and remote is publishing
357 # - and remote is publishing
353 # We may be in issue 3871 case!
358 # We may be in issue 3871 case!
354 # We drop the possible phase synchronisation done by
359 # We drop the possible phase synchronisation done by
355 # courtesy to publish changesets possibly locally draft
360 # courtesy to publish changesets possibly locally draft
356 # on the remote.
361 # on the remote.
357 remotephases = {'publishing': 'True'}
362 remotephases = {'publishing': 'True'}
358 if not remotephases: # old server or public only reply from non-publishing
363 if not remotephases: # old server or public only reply from non-publishing
359 _localphasemove(pushop, cheads)
364 _localphasemove(pushop, cheads)
360 # don't push any phase data as there is nothing to push
365 # don't push any phase data as there is nothing to push
361 else:
366 else:
362 ana = phases.analyzeremotephases(pushop.repo, cheads,
367 ana = phases.analyzeremotephases(pushop.repo, cheads,
363 remotephases)
368 remotephases)
364 pheads, droots = ana
369 pheads, droots = ana
365 ### Apply remote phase on local
370 ### Apply remote phase on local
366 if remotephases.get('publishing', False):
371 if remotephases.get('publishing', False):
367 _localphasemove(pushop, cheads)
372 _localphasemove(pushop, cheads)
368 else: # publish = False
373 else: # publish = False
369 _localphasemove(pushop, pheads)
374 _localphasemove(pushop, pheads)
370 _localphasemove(pushop, cheads, phases.draft)
375 _localphasemove(pushop, cheads, phases.draft)
371 ### Apply local phase on remote
376 ### Apply local phase on remote
372
377
373 # Get the list of all revs draft on remote by public here.
378 # Get the list of all revs draft on remote by public here.
374 # XXX Beware that revset break if droots is not strictly
379 # XXX Beware that revset break if droots is not strictly
375 # XXX root we may want to ensure it is but it is costly
380 # XXX root we may want to ensure it is but it is costly
376 outdated = unfi.set('heads((%ln::%ln) and public())',
381 outdated = unfi.set('heads((%ln::%ln) and public())',
377 droots, cheads)
382 droots, cheads)
378
383
379 b2caps = bundle2.bundle2caps(pushop.remote)
384 b2caps = bundle2.bundle2caps(pushop.remote)
380 if 'b2x:pushkey' in b2caps:
385 if 'b2x:pushkey' in b2caps:
381 # server supports bundle2, let's do a batched push through it
386 # server supports bundle2, let's do a batched push through it
382 #
387 #
383 # This will eventually be unified with the changesets bundle2 push
388 # This will eventually be unified with the changesets bundle2 push
384 bundler = bundle2.bundle20(pushop.ui, b2caps)
389 bundler = bundle2.bundle20(pushop.ui, b2caps)
385 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
390 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
386 bundler.newpart('b2x:replycaps', data=capsblob)
391 bundler.newpart('b2x:replycaps', data=capsblob)
387 part2node = []
392 part2node = []
388 enc = pushkey.encode
393 enc = pushkey.encode
389 for newremotehead in outdated:
394 for newremotehead in outdated:
390 part = bundler.newpart('b2x:pushkey')
395 part = bundler.newpart('b2x:pushkey')
391 part.addparam('namespace', enc('phases'))
396 part.addparam('namespace', enc('phases'))
392 part.addparam('key', enc(newremotehead.hex()))
397 part.addparam('key', enc(newremotehead.hex()))
393 part.addparam('old', enc(str(phases.draft)))
398 part.addparam('old', enc(str(phases.draft)))
394 part.addparam('new', enc(str(phases.public)))
399 part.addparam('new', enc(str(phases.public)))
395 part2node.append((part.id, newremotehead))
400 part2node.append((part.id, newremotehead))
396 stream = util.chunkbuffer(bundler.getchunks())
401 stream = util.chunkbuffer(bundler.getchunks())
397 try:
402 try:
398 reply = pushop.remote.unbundle(stream, ['force'], 'push')
403 reply = pushop.remote.unbundle(stream, ['force'], 'push')
399 op = bundle2.processbundle(pushop.repo, reply)
404 op = bundle2.processbundle(pushop.repo, reply)
400 except error.BundleValueError, exc:
405 except error.BundleValueError, exc:
401 raise util.Abort('missing support for %s' % exc)
406 raise util.Abort('missing support for %s' % exc)
402 for partid, node in part2node:
407 for partid, node in part2node:
403 partrep = op.records.getreplies(partid)
408 partrep = op.records.getreplies(partid)
404 results = partrep['pushkey']
409 results = partrep['pushkey']
405 assert len(results) <= 1
410 assert len(results) <= 1
406 msg = None
411 msg = None
407 if not results:
412 if not results:
408 msg = _('server ignored update of %s to public!\n') % node
413 msg = _('server ignored update of %s to public!\n') % node
409 elif not int(results[0]['return']):
414 elif not int(results[0]['return']):
410 msg = _('updating %s to public failed!\n') % node
415 msg = _('updating %s to public failed!\n') % node
411 if msg is not None:
416 if msg is not None:
412 pushop.ui.warn(msg)
417 pushop.ui.warn(msg)
413
418
414 else:
419 else:
415 # fallback to independant pushkey command
420 # fallback to independant pushkey command
416 for newremotehead in outdated:
421 for newremotehead in outdated:
417 r = pushop.remote.pushkey('phases',
422 r = pushop.remote.pushkey('phases',
418 newremotehead.hex(),
423 newremotehead.hex(),
419 str(phases.draft),
424 str(phases.draft),
420 str(phases.public))
425 str(phases.public))
421 if not r:
426 if not r:
422 pushop.ui.warn(_('updating %s to public failed!\n')
427 pushop.ui.warn(_('updating %s to public failed!\n')
423 % newremotehead)
428 % newremotehead)
424
429
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
438
443
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
455
460
def _pushbookmark(pushop):
    """Advance bookmark positions on the remote.

    Only bookmarks that fast-forward on the remote side (``advsrc``) are
    pushed, and only when their target is part of the pushed revisions.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    # local rev numbers covered by this push (empty means "push all")
    ancestors = list(repo.changelog.ancestors(revnums, inclusive=True))
    comparison = bookmarks.compare(repo, repo._bookmarks,
                                   remote.listkeys('bookmarks'),
                                   srchex=hex)
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid) = comparison
    for name, srcid, dstid in advsrc:
        if ancestors and repo[srcid].rev() not in ancestors:
            # bookmark target was not part of the push; leave remote alone
            continue
        if remote.pushkey('bookmarks', name, dstid, srcid):
            ui.status(_("updating bookmark %s\n") % name)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % name)
475
480
class pulloperation(object):
    """Carrier for the state of a single pull operation.

    Instances bundle together the repos involved, the negotiated
    changeset sets, and the (lazily created) transaction.  A fresh
    instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revisions we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of changesets common to local and remote before the pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # steps remaining to do (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is None:
            # everything was requested: sync on everything common, plus
            # any remote head not already in the common set
            seen = set(self.common)
            subset = list(self.common)
            for node in self.rheads:
                if node not in seen:
                    subset.append(node)
            return subset
        # a specific subset was requested; sync on exactly that subset
        return self.heads

    def gettransaction(self):
        """get the pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close the transaction if one was created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release the transaction if one was created"""
        if self._tr is not None:
            self._tr.release()
542
547
def pull(repo, remote, heads=None, force=False):
    """Pull changesets (and related data) from ``remote`` into ``repo``.

    Returns the changegroup result code (``pullop.cgresult``).
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        # try the experimental bundle2 path first; it removes whatever
        # steps it handled from pullop.todosteps
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        # fall back to the legacy wire protocol for the remaining steps
        for step, stepfunc in [('changegroup', _pullchangeset),
                               ('phases', _pullphase),
                               ('obsmarkers', _pullobsolete)]:
            if step in pullop.todosteps:
                stepfunc(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()
    return pullop.cgresult
571
576
def _pulldiscovery(pullop):
    """Run the discovery phase of a pull.

    Currently handles changeset discovery only; it is expected to become
    the entry point for all pull-related discovery at some point.
    """
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote,
        heads=pullop.heads, force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
582
587
583 def _pullbundle2(pullop):
588 def _pullbundle2(pullop):
584 """pull data using bundle2
589 """pull data using bundle2
585
590
586 For now, the only supported data are changegroup."""
591 For now, the only supported data are changegroup."""
587 remotecaps = bundle2.bundle2caps(pullop.remote)
592 remotecaps = bundle2.bundle2caps(pullop.remote)
588 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
593 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
589 # pulling changegroup
594 # pulling changegroup
590 pullop.todosteps.remove('changegroup')
595 pullop.todosteps.remove('changegroup')
591
596
592 kwargs['common'] = pullop.common
597 kwargs['common'] = pullop.common
593 kwargs['heads'] = pullop.heads or pullop.rheads
598 kwargs['heads'] = pullop.heads or pullop.rheads
594 if 'b2x:listkeys' in remotecaps:
599 if 'b2x:listkeys' in remotecaps:
595 kwargs['listkeys'] = ['phase']
600 kwargs['listkeys'] = ['phase']
596 if not pullop.fetch:
601 if not pullop.fetch:
597 pullop.repo.ui.status(_("no changes found\n"))
602 pullop.repo.ui.status(_("no changes found\n"))
598 pullop.cgresult = 0
603 pullop.cgresult = 0
599 else:
604 else:
600 if pullop.heads is None and list(pullop.common) == [nullid]:
605 if pullop.heads is None and list(pullop.common) == [nullid]:
601 pullop.repo.ui.status(_("requesting all changes\n"))
606 pullop.repo.ui.status(_("requesting all changes\n"))
602 _pullbundle2extraprepare(pullop, kwargs)
607 _pullbundle2extraprepare(pullop, kwargs)
603 if kwargs.keys() == ['format']:
608 if kwargs.keys() == ['format']:
604 return # nothing to pull
609 return # nothing to pull
605 bundle = pullop.remote.getbundle('pull', **kwargs)
610 bundle = pullop.remote.getbundle('pull', **kwargs)
606 try:
611 try:
607 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
612 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
608 except error.BundleValueError, exc:
613 except error.BundleValueError, exc:
609 raise util.Abort('missing support for %s' % exc)
614 raise util.Abort('missing support for %s' % exc)
610
615
611 if pullop.fetch:
616 if pullop.fetch:
612 assert len(op.records['changegroup']) == 1
617 assert len(op.records['changegroup']) == 1
613 pullop.cgresult = op.records['changegroup'][0]['return']
618 pullop.cgresult = op.records['changegroup'][0]['return']
614
619
615 # processing phases change
620 # processing phases change
616 for namespace, value in op.records['listkeys']:
621 for namespace, value in op.records['listkeys']:
617 if namespace == 'phases':
622 if namespace == 'phases':
618 _pullapplyphases(pullop, value)
623 _pullapplyphases(pullop, value)
619
624
620 def _pullbundle2extraprepare(pullop, kwargs):
625 def _pullbundle2extraprepare(pullop, kwargs):
621 """hook function so that extensions can extend the getbundle call"""
626 """hook function so that extensions can extend the getbundle call"""
622 pass
627 pass
623
628
def _pullchangeset(pullop):
    """Pull a changegroup over the legacy protocol and apply it locally."""
    pullop.todosteps.remove('changegroup')
    repo = pullop.repo
    remote = pullop.remote
    if not pullop.fetch:
        repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    # Open the transaction as late as possible: an empty pull must not
    # create one, and doing so would break future useful rollback calls.
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = remote.getbundle('pull', common=pullop.common,
                              heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(repo, cg, 'pull',
                                                 remote.url())
655
660
def _pullphase(pullop):
    """Fetch phase data from the remote and apply the movements locally."""
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)
660
665
def _pullapplyphases(pullop, remotephases):
    """Apply phase movements derived from the observed remote state."""
    pullop.todosteps.remove('phases')
    repo = pullop.repo
    subset = pullop.pulledsubset
    publishing = bool(remotephases.get('publishing', False))
    if publishing or not remotephases:
        # remote is old or publishing: all common changesets should be
        # seen as public
        phases.advanceboundary(repo, phases.public, subset)
    else:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(repo, subset, remotephases)
        phases.advanceboundary(repo, phases.public, pheads)
        phases.advanceboundary(repo, phases.draft, subset)
678
683
def _pullobsolete(pullop):
    """Pull obsolescence markers from the remote via listkeys.

    Returns the pull transaction when one had to be created, so the
    calling code knows a new transaction now exists (when applicable);
    returns None otherwise.

    Exists mostly to allow overriding for experimentation purposes.
    """
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if not key.startswith('dump'):
                    continue
                markers = base85.b85decode(remoteobs[key])
                pullop.repo.obsstore.mergemarkers(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
700
705
def caps20to10(repo):
    """Return the bundlecaps set advertising bundle20 support for getbundle."""
    capsblob = bundle2.encodecaps(repo.bundle2caps)
    return set(['HG2X', 'bundle2=' + urllib.quote(capsblob)])
707
712
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """Return a full bundle (with potentially multiple kinds of parts).

    The result is either an HG10 changegroup stream or an HG2X bundle,
    depending on the capabilities listed in ``bundlecaps``.  For now a
    bundle2 can carry a changegroup plus listkeys parts; more part types
    will appear as bundle2 matures.

    This differs from changegroup.getbundle, which only ever returns an
    HG10 changegroup.  The two may be reunited once the API for querying
    different kinds of data settles.

    The implementation is at a very early stage and will get massive
    rework when the bundle API is refined.
    """
    # build the changegroup in all cases
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # plain HG10 requested: extra arguments cannot be honored
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return cg
    # very crude first implementation; generation will eventually be lazy
    b2caps = {}
    for cap in bundlecaps:
        if cap.startswith('bundle2='):
            blob = urllib.unquote(cap[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
750
755
751 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
756 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
752 bundlecaps=None, **kwargs):
757 bundlecaps=None, **kwargs):
753 """hook function to let extensions add parts to the requested bundle"""
758 """hook function to let extensions add parts to the requested bundle"""
754 pass
759 pass
755
760
def check_heads(repo, their_heads, context):
    """Raise PushRaced if the repo heads changed since ``their_heads``.

    Used by peer for unbundling.  ``their_heads`` may be the literal
    ['force'], the exact current head list, or the pair
    ['hashed', <sha1 digest of the sorted heads>].
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    uptodate = (their_heads == ['force']
                or their_heads == heads
                or their_heads == ['hashed', heads_hash])
    if not uptodate:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
769
774
770 def unbundle(repo, cg, heads, source, url):
775 def unbundle(repo, cg, heads, source, url):
771 """Apply a bundle to a repo.
776 """Apply a bundle to a repo.
772
777
773 this function makes sure the repo is locked during the application and have
778 this function makes sure the repo is locked during the application and have
774 mechanism to check that no push race occurred between the creation of the
779 mechanism to check that no push race occurred between the creation of the
775 bundle and its application.
780 bundle and its application.
776
781
777 If the push was raced as PushRaced exception is raised."""
782 If the push was raced as PushRaced exception is raised."""
778 r = 0
783 r = 0
779 # need a transaction when processing a bundle2 stream
784 # need a transaction when processing a bundle2 stream
780 tr = None
785 tr = None
781 lock = repo.lock()
786 lock = repo.lock()
782 try:
787 try:
783 check_heads(repo, heads, 'uploading changes')
788 check_heads(repo, heads, 'uploading changes')
784 # push can proceed
789 # push can proceed
785 if util.safehasattr(cg, 'params'):
790 if util.safehasattr(cg, 'params'):
786 try:
791 try:
787 tr = repo.transaction('unbundle')
792 tr = repo.transaction('unbundle')
788 tr.hookargs['bundle2-exp'] = '1'
793 tr.hookargs['bundle2-exp'] = '1'
789 r = bundle2.processbundle(repo, cg, lambda: tr).reply
794 r = bundle2.processbundle(repo, cg, lambda: tr).reply
790 cl = repo.unfiltered().changelog
795 cl = repo.unfiltered().changelog
791 p = cl.writepending() and repo.root or ""
796 p = cl.writepending() and repo.root or ""
792 repo.hook('b2x-pretransactionclose', throw=True, source=source,
797 repo.hook('b2x-pretransactionclose', throw=True, source=source,
793 url=url, pending=p, **tr.hookargs)
798 url=url, pending=p, **tr.hookargs)
794 tr.close()
799 tr.close()
795 repo.hook('b2x-transactionclose', source=source, url=url,
800 repo.hook('b2x-transactionclose', source=source, url=url,
796 **tr.hookargs)
801 **tr.hookargs)
797 except Exception, exc:
802 except Exception, exc:
798 exc.duringunbundle2 = True
803 exc.duringunbundle2 = True
799 raise
804 raise
800 else:
805 else:
801 r = changegroup.addchangegroup(repo, cg, source, url)
806 r = changegroup.addchangegroup(repo, cg, source, url)
802 finally:
807 finally:
803 if tr is not None:
808 if tr is not None:
804 tr.release()
809 tr.release()
805 lock.release()
810 lock.release()
806 return r
811 return r
General Comments 0
You need to be logged in to leave comments. Login now