##// END OF EJS Templates
push: use bundle2 to push phases when available...
Pierre-Yves David -
r21662:09f19e09 default
parent child Browse files
Show More
@@ -1,754 +1,792 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Return a bundle object for the bundle data readable from `fh`.

    `fname` is used for error reporting only; when empty it falls back to
    "stream", and when `vfs` is given the name is joined onto the vfs root
    for nicer messages.  Recognises HG10 changegroup bundles and the
    experimental HG2X bundle2 format; a headerless stream (first byte
    '\\0') is wrapped up and treated as an uncompressed HG10 bundle.

    Raises util.Abort for non-Mercurial data or unknown bundle versions.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # headerless bundle: push the bytes we already consumed back
            # in front of the stream and assume uncompressed HG10 content
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # the two-byte compression marker follows the HG10 header
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    elif version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    else:
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
class pushoperation(object):
    """State holder for a single push operation.

    An instance is created at the beginning of every push and discarded
    once the push completes; it carries the push parameters and the
    intermediate results shared between the individual push steps.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # source repository (the one we push from)
        self.repo = repo
        self.ui = repo.ui
        # destination repository
        self.remote = remote
        # was --force requested?
        self.force = force
        # revisions selected for push (None means "everything")
        self.revs = revs
        # is creating a new remote branch allowed?
        self.newbranch = newbranch
        # whether the local repository lock could be acquired
        self.locallocked = None
        # integer push outcome:
        #   None  -> nothing to push
        #   0     -> HTTP error
        #   1     -> pushed with unchanged remote head count, *or*
        #            outgoing changesets exist but the push was refused
        #   other -> values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (holds common and outgoing data)
        self.outgoing = None
        # all remote heads as they were before the push
        self.remoteheads = None
        # truthy when some nodes are missing locally
        self.incoming = None
        # set of all heads common to both sides after the changegroup push
        self.commonheads = None
79
79
def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        # pushing between local repositories: every requirement of the
        # source must be understood by the destination
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push requires locking the remote repository
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                pushop.repo.prepushoutgoinghooks(pushop.repo,
                                                 pushop.remote,
                                                 pushop.outgoing)
                # prefer the experimental bundle2 protocol when both
                # sides opted in, otherwise use the legacy changegroup push
                if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                              False)
                    and pushop.remote.capable('bundle2-exp')):
                    _pushbundle2(pushop)
                else:
                    _pushchangeset(pushop)
            # phase/obsmarker exchange is useful even when no changeset
            # was pushed, so these run outside the conditional above
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    _pushbookmark(pushop)
    return pushop.ret
153
153
def _pushdiscovery(pushop):
    """Run discovery against the remote.

    Fills in pushop.outgoing (common + missing changesets),
    pushop.remoteheads (remote heads before the push) and
    pushop.incoming (truthy if nodes are missing locally).
    """
    # discovery
    unfi = pushop.repo.unfiltered()
    fci = discovery.findcommonincoming
    commoninc = fci(unfi, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
166
166
def _pushcheckoutgoing(pushop):
    """Check that the outgoing changesets are safe to push.

    Returns False when there is nothing to push.  Unless --force was
    given, aborts when the push would propagate obsolete or troubled
    changesets, or would create disallowed new remote heads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these message templates are hoisted here for the 80 char limit
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
205
205
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # create reply capability
    capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
    bundler.newpart('b2x:replycaps', data=capsblob)
    # Send known heads to the server for race detection.
    if not pushop.force:
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    # let extensions insert additional parts (returns opaque state that is
    # handed back to _pushbundle2extrareply below)
    extrainfo = _pushbundle2extraparts(pushop, bundler)
    # add the changegroup bundle
    cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        reply = pushop.remote.unbundle(stream, ['force'], 'push')
    except error.BundleValueError, exc:
        # the remote refused a mandatory part/parameter
        raise util.Abort('missing support for %s' % exc)
    try:
        # process the server reply locally (e.g. to collect part results)
        op = bundle2.processbundle(pushop.repo, reply)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)
    cgreplies = op.records.getreplies(cgpart.id)
    assert len(cgreplies['changegroup']) == 1
    pushop.ret = cgreplies['changegroup'][0]['return']
    _pushbundle2extrareply(pushop, op, extrainfo)
235
235
236 def _pushbundle2extraparts(pushop, bundler):
236 def _pushbundle2extraparts(pushop, bundler):
237 """hook function to let extensions add parts
237 """hook function to let extensions add parts
238
238
239 Return a dict to let extensions pass data to the reply processing.
239 Return a dict to let extensions pass data to the reply processing.
240 """
240 """
241 return {}
241 return {}
242
242
243 def _pushbundle2extrareply(pushop, op, extrainfo):
243 def _pushbundle2extrareply(pushop, op, extrainfo):
244 """hook function to let extensions react to part replies
244 """hook function to let extensions react to part replies
245
245
246 The dict from _pushbundle2extrareply is fed to this function.
246 The dict from _pushbundle2extrareply is fed to this function.
247 """
247 """
248 pass
248 pass
249
249
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Stores the result code in pushop.ret (see push() for its meaning).
    """
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            'push')
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
289
289
def _pushcomputecommonheads(pushop):
    """Compute the heads common to both sides after the changegroup push.

    The result is stored in pushop.commonheads and is used by the later
    phase-synchronisation step.
    """
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeed, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All out push fails. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
323
323
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    First applies the remote phase state to the local repository, then
    advertises locally-public changesets to the remote — through a
    batched bundle2 'b2x:pushkey' bundle when the server supports it,
    falling back to one pushkey call per head otherwise.
    """
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # publishing server: everything common becomes public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote by public here.
        # XXX Beware that revset break if droots is not strictly
        # XXX root we may want to ensure it is but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)

        b2caps = bundle2.bundle2caps(pushop.remote)
        if 'b2x:pushkey' in b2caps:
            # server supports bundle2, let's do a batched push through it
            #
            # This will eventually be unified with the changesets bundle2 push
            bundler = bundle2.bundle20(pushop.ui, b2caps)
            capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
            bundler.newpart('b2x:replycaps', data=capsblob)
            # remember which part advances which head so replies can be
            # matched back to the right changeset
            part2node = []
            enc = pushkey.encode
            for newremotehead in outdated:
                part = bundler.newpart('b2x:pushkey')
                part.addparam('namespace', enc('phases'))
                part.addparam('key', enc(newremotehead.hex()))
                part.addparam('old', enc(str(phases.draft)))
                part.addparam('new', enc(str(phases.public)))
                part2node.append((part.id, newremotehead))
            stream = util.chunkbuffer(bundler.getchunks())
            try:
                reply = pushop.remote.unbundle(stream, ['force'], 'push')
                op = bundle2.processbundle(pushop.repo, reply)
            except error.BundleValueError, exc:
                raise util.Abort('missing support for %s' % exc)
            for partid, node in part2node:
                partrep = op.records.getreplies(partid)
                results = partrep['pushkey']
                assert len(results) <= 1
                msg = None
                if not results:
                    msg = _('server ignored update of %s to public!\n') % node
                elif not int(results[0]['return']):
                    msg = _('updating %s to public failed!\n') % node
                if msg is not None:
                    pushop.ui.warn(msg)

        else:
            # fallback to independent pushkey command
            for newremotehead in outdated:
                r = pushop.remote.pushkey('phases',
                                          newremotehead.hex(),
                                          str(phases.draft),
                                          str(phases.public))
                if not r:
                    pushop.ui.warn(_('updating %s to public failed!\n')
                                   % newremotehead)
372
410
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo

    When the local lock could not be acquired (pushop.locallocked is
    falsy) no phase is changed; the user is only informed when a move
    would actually have happened.
    """
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
386
424
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Only runs when obsolescence is enabled locally and the remote
    advertises the 'obsolete' pushkey namespace.
    """
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        # local markers, encoded through the pushkey interface
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
403
441
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Only bookmarks that moved forward on the local side (advsrc) and
    whose target is an ancestor of the pushed revisions are advanced.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)

    for b, scid, dcid in advsrc:
        # skip bookmarks that point outside the pushed set
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)
423
461
class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created (lazily, see gettransaction)
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step remaining todo (related to future bundle2 usage)
        # steps not handled by bundle2 fall back to the legacy code paths
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
def pull(repo, remote, heads=None, force=False):
    """pull changesets (and related data) from remote into repo

    Returns the changegroup result code (stored on the pull operation).
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # refuse to pull from a local repo whose requirements we do not meet
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        useb2 = (pullop.repo.ui.configbool('experimental', 'bundle2-exp',
                                           False)
                 and pullop.remote.capable('bundle2-exp'))
        if useb2:
            _pullbundle2(pullop)
        # run any step bundle2 did not take care of through the legacy path
        for step, handler in (('changegroup', _pullchangeset),
                              ('phases', _pullphase),
                              ('obsmarkers', _pullobsolete)):
            if step in pullop.todosteps:
                handler(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will grow to handle all
    discovery at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(),
        pullop.remote,
        heads=pullop.heads,
        force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    remotecaps = bundle2.bundle2caps(pullop.remote)
    # advertise our own bundle2 capabilities to the remote
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}
    # pulling changegroup
    pullop.todosteps.remove('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    if 'b2x:listkeys' in remotecaps:
        # remote can embed pushkey namespaces in the bundle: ask for phases
        kwargs['listkeys'] = ['phase']
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    # let extensions tweak the getbundle arguments before the request
    _pullbundle2extraprepare(pullop, kwargs)
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError, exc:
        raise util.Abort('missing support for %s' % exc)

    if pullop.fetch:
        # exactly one changegroup part is expected in the reply
        assert len(op.records['changegroup']) == 1
        pullop.cgresult = op.records['changegroup'][0]['return']

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""
    # intentionally empty: extensions wrap this to add getbundle arguments
    pass
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        # nothing missing remotely: report and use 0 as the pull result
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the richest protocol the remote supports, oldest last
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
def _pullphase(pullop):
    """fetch phase information from the remote and apply it locally"""
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    pullop.todosteps.remove('phases')
    publishing = bool(remotephases.get('publishing', False))
    if not remotephases or publishing:
        # Remote is old or publishing all common changesets
        # should be seen as public
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)
        return
    # remote is new and unpublishing
    pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                             pullop.pulledsubset,
                                             remotephases)
    phases.advanceboundary(pullop.repo, phases.public, pheads)
    phases.advanceboundary(pullop.repo, phases.draft,
                           pullop.pulledsubset)
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if not obsolete._enabled:
        return tr
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    markers = pullop.remote.listkeys('obsolete')
    if 'dump0' in markers:
        # at least one marker dump exists: open a transaction and merge
        # every dump* key, newest first
        tr = pullop.gettransaction()
        for name in sorted(markers, reverse=True):
            if name.startswith('dump'):
                raw = base85.b85decode(markers[name])
                pullop.repo.obsstore.mergemarkers(tr, raw)
        pullop.repo.invalidatevolatilesets()
    return tr
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(repo.bundle2caps)
    return set(['HG2X', 'bundle2=' + urllib.quote(capsblob)])
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    changes when more part type will be available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we what to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build changegroup bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        # client did not advertise bundle2: plain HG10 changegroup only;
        # extra arguments only make sense with bundle2
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    b2caps = {}
    # decode the client capabilities embedded in the bundlecaps strings
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    if cg:
        bundler.newpart('b2x:changegroup', data=cg.getchunks())
    # one listkeys part per requested pushkey namespace
    listkeys = kwargs.get('listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('b2x:listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
    # let extensions append their own parts
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
                        bundlecaps=None, **kwargs):
    """hook function to let extensions add parts to the requested bundle"""
    # intentionally empty: extensions wrap this to append bundle2 parts
    pass
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # heads unchanged (or the caller forced the push): nothing to do
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # a 'params' attribute identifies a bundle2 stream
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                # expose pending changesets to the pre-close hook before
                # the transaction is committed
                cl = repo.unfiltered().changelog
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # tag the exception so the wire protocol layer knows it
                # happened during bundle2 processing, then re-raise
                exc.duringunbundle2 = True
                raise
        else:
            # legacy bundle10 changegroup path
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now