##// END OF EJS Templates
bundle2: include client capabilities in the pushed bundle...
Pierre-Yves David -
r21142:15039ce3 default
parent child Browse files
Show More
@@ -1,708 +1,710 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85
11 import util, scmutil, changegroup, base85
12 import discovery, phases, obsolete, bookmarks, bundle2
12 import discovery, phases, obsolete, bookmarks, bundle2
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Read a bundle header from ``fh`` and return a matching unbundler.

    ``fname`` is only used in error messages (falls back to "stream" when
    empty); when ``vfs`` is given, ``fname`` is joined to the vfs root for
    display.  Handles the HG10 (changegroup) and HG20 (bundle2) formats,
    plus headerless changegroup streams that start with a NUL byte.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # Headerless bundle: push the 4 bytes we already consumed back
            # onto the stream and treat it as an uncompressed HG10 bundle.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # compression algorithm follows the 4-byte header on disk
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    elif version == '20':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    else:
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None
79
79
def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        # local-filesystem peer: refuse early if the destination cannot
        # understand one of our repo requirements
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # no unbundle support: we must lock the remote directly
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                pushop.repo.prepushoutgoinghooks(pushop.repo,
                                                 pushop.remote,
                                                 pushop.outgoing)
                # prefer the bundle2 protocol when the server advertises it
                if pushop.remote.capable('bundle2'):
                    _pushbundle2(pushop)
                else:
                    _pushchangeset(pushop)
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    # bookmarks are pushed outside of the locks, best-effort
    _pushbookmark(pushop)
    return pushop.ret
151
151
def _pushdiscovery(pushop):
    """Run common/outgoing discovery against the remote.

    Stores the resulting ``outgoing`` object, the remote heads and the
    incoming indicator on ``pushop`` for the later push steps.
    """
    unfi = pushop.repo.unfiltered()
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
164
164
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set; return True if the push should proceed.

    Returns False when there is nothing to push.  Aborts when the outgoing
    set contains obsolete/troubled changesets (unless forced) or when the
    push would create new remote heads that discovery rejects.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        # bookmarks being pushed may legitimise new remote heads
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
203
203
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    # Send known head to the server for race detection.
    capsblob = urllib.unquote(pushop.remote.capable('bundle2'))
    caps = bundle2.decodecaps(capsblob)
    bundler = bundle2.bundle20(pushop.ui, caps)
    # create reply capability
    # (advertise our own bundle2 capabilities so the server can shape
    #  its reply bundle accordingly)
    capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
    bundler.addpart(bundle2.bundlepart('replycaps', data=capsblob))
    if not pushop.force:
        # server aborts if its heads changed since discovery (push race)
        part = bundle2.bundlepart('CHECK:HEADS', data=iter(pushop.remoteheads))
        bundler.addpart(part)
    # add the changegroup bundle
    cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundle2.bundlepart('CHANGEGROUP', data=cg.getchunks())
    bundler.addpart(cgpart)
    stream = util.chunkbuffer(bundler.getchunks())
    reply = pushop.remote.unbundle(stream, ['force'], 'push')
    try:
        op = bundle2.processbundle(pushop.repo, reply)
    except KeyError, exc:
        # reply contained a part type we do not understand
        raise util.Abort('missing support for %s' % exc)
    cgreplies = op.records.getreplies(cgpart.id)
    assert len(cgreplies['changegroup']) == 1
    pushop.ret = cgreplies['changegroup'][0]['return']
230
232
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                            or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            'push')
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
270
272
def _pushcomputecommonheads(pushop):
    """Compute the set of heads common to both sides after the push.

    The result is stored in ``pushop.commonheads`` and later drives the
    phase synchronisation step.
    """
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeed, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All out push fails. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
304
306
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    unfi = pushop.repo.unfiltered()
    # NOTE(review): the block below recomputes cheads exactly like
    # _pushcomputecommonheads, overwriting the initial assignment from
    # pushop.commonheads on every path — presumably transitional
    # duplication; confirm before deduplicating.
    cheads = pushop.commonheads
    if pushop.ret:
        # push succeed, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All out push fails. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote by public here.
        # XXX Beware that revset break if droots is not strictly
        # XXX root we may want to ensure it is but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
384
386
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.locallocked:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
        return
    phases.advanceboundary(pushop.repo, phase, nodes)
398
400
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        # NOTE(review): despite the name, this is the *local* repo's
        # obsolete pushkey data, to be pushed to the remote below.
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
415
417
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)

    # only fast-forward updates (advsrc) are pushed; when specific revs
    # were given, restrict to bookmarks on pushed ancestors
    for b, scid, dcid in advsrc:
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)
435
437
class pulloperation(object):
    """A object that represent a single pull operation

    Its purpose is to carry pull related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # set of steps remaining todo (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is not None:
            # We pulled a specific subset: sync on this subset.
            return self.heads
        # We pulled everything possible: sync on everything common plus
        # any remote head we did not already have in common.
        subset = list(self.common)
        known = set(self.common)
        subset.extend(h for h in self.rheads if h not in known)
        return subset

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
502
504
def pull(repo, remote, heads=None, force=False):
    """Pull changesets (and related data) from ``remote`` into ``repo``.

    Returns the changegroup result code, used as pull's return value.
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        # refuse to pull from a local repo that requires features we lack
        unsupported = set(pullop.remote.requirements) - pullop.repo.supported
        if unsupported:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(unsupported)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        if pullop.remote.capable('bundle2'):
            _pullbundle2(pullop)
        # legacy code paths handle whatever bundle2 did not take care of
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
530
532
def _pulldiscovery(pullop):
    """discovery phase for the pull

    Only handles changeset discovery for now; other kinds of discovery
    will move here eventually."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote, heads=pullop.heads,
        force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
541
543
542 def _pullbundle2(pullop):
544 def _pullbundle2(pullop):
543 """pull data using bundle2
545 """pull data using bundle2
544
546
545 For now, the only supported data are changegroup."""
547 For now, the only supported data are changegroup."""
546 kwargs = {'bundlecaps': set(['HG20'])}
548 kwargs = {'bundlecaps': set(['HG20'])}
547 # pulling changegroup
549 # pulling changegroup
548 pullop.todosteps.remove('changegroup')
550 pullop.todosteps.remove('changegroup')
549 if not pullop.fetch:
551 if not pullop.fetch:
550 pullop.repo.ui.status(_("no changes found\n"))
552 pullop.repo.ui.status(_("no changes found\n"))
551 pullop.cgresult = 0
553 pullop.cgresult = 0
552 else:
554 else:
553 kwargs['common'] = pullop.common
555 kwargs['common'] = pullop.common
554 kwargs['heads'] = pullop.heads or pullop.rheads
556 kwargs['heads'] = pullop.heads or pullop.rheads
555 if pullop.heads is None and list(pullop.common) == [nullid]:
557 if pullop.heads is None and list(pullop.common) == [nullid]:
556 pullop.repo.ui.status(_("requesting all changes\n"))
558 pullop.repo.ui.status(_("requesting all changes\n"))
557 if kwargs.keys() == ['format']:
559 if kwargs.keys() == ['format']:
558 return # nothing to pull
560 return # nothing to pull
559 bundle = pullop.remote.getbundle('pull', **kwargs)
561 bundle = pullop.remote.getbundle('pull', **kwargs)
560 try:
562 try:
561 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
563 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
562 except KeyError, exc:
564 except KeyError, exc:
563 raise util.Abort('missing support for %s' % exc)
565 raise util.Abort('missing support for %s' % exc)
564 assert len(op.records['changegroup']) == 1
566 assert len(op.records['changegroup']) == 1
565 pullop.cgresult = op.records['changegroup'][0]['return']
567 pullop.cgresult = op.records['changegroup'][0]['return']
566
568
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # Open the transaction as late as possible so we don't create one
    # for nothing and don't break future useful rollback calls.
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    remote = pullop.remote
    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = remote.getbundle('pull', common=pullop.common,
                              heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 remote.url())
598
600
def _pullphase(pullop):
    """Synchronize local phase boundaries with the remote's phase data."""
    pullop.todosteps.remove('phases')
    remotephases = pullop.remote.listkeys('phases')
    publishing = bool(remotephases.get('publishing', False))
    if not remotephases or publishing:
        # Remote is old or publishing: all common changesets should be
        # seen as public.
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)
        return
    # remote is new and non-publishing
    pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                             pullop.pulledsubset,
                                             remotephases)
    phases.advanceboundary(pullop.repo, phases.public, pheads)
    phases.advanceboundary(pullop.repo, phases.draft,
                           pullop.pulledsubset)
617
619
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    Returns the pull transaction if one was created, so the calling code
    knows a new transaction now exists (when applicable); otherwise None.

    Exists mostly to allow overriding for experimentation purposes."""
    pullop.todosteps.remove('obsmarkers')
    if not obsolete._enabled:
        return None
    pullop.repo.ui.debug('fetching remote obsolete markers\n')
    remoteobs = pullop.remote.listkeys('obsolete')
    tr = None
    if 'dump0' in remoteobs:
        tr = pullop.gettransaction()
        for key in sorted(remoteobs, reverse=True):
            if key.startswith('dump'):
                markers = base85.b85decode(remoteobs[key])
                pullop.repo.obsstore.mergemarkers(tr, markers)
        pullop.repo.invalidatevolatilesets()
    return tr
639
641
def getbundle(repo, source, heads=None, common=None, bundlecaps=None):
    """return a full bundle (with potentially multiple kind of parts)

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    changes when more part type will be available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when we
    have a clearer idea of the API we what to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is not None and 'HG20' in bundlecaps:
        # very crude first implementation,
        # the bundle API will change and the generation will be done lazily.
        bundler = bundle2.bundle20(repo.ui)
        cgpart = bundle2.bundlepart('changegroup', data=cg.getchunks())
        bundler.addpart(cgpart)
        return util.chunkbuffer(bundler.getchunks())
    # no bundle2 capability advertised: plain HG10 changegroup
    return cg
665
667
class PushRaced(RuntimeError):
    """An exception raised during unbundling to indicate a push race"""
668
670
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if (their_heads == ['force'] or their_heads == heads
            or their_heads == ['hashed', heads_hash]):
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise PushRaced('repository changed while %s - '
                    'please try again' % context)
682
684
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced a PushRaced exception is raised."""
    ret = 0
    # a transaction is needed when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # bundle2 stream: process it inside its own transaction
            tr = repo.transaction('unbundle')
            ret = bundle2.processbundle(repo, cg, lambda: tr).reply
            tr.close()
        else:
            ret = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return ret
General Comments 0
You need to be logged in to leave comments. Login now