##// END OF EJS Templates
exchange: fix bad indentation...
Pierre-Yves David -
r21258:71931b78 default
parent child Browse files
Show More
@@ -1,761 +1,761 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2
12 import discovery, phases, obsolete, bookmarks, bundle2
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte bundle header on ``fh`` and return an unbundler.

    ``fname`` is only used for error messages (and vfs resolution); a
    missing name means the data comes from a stream such as stdin.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream starting with a NUL byte is assumed to be
        # an uncompressed HG10 changegroup; push the peeked bytes back.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]
    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        if alg is None:
            # compression marker follows the header for HG10 bundles
            alg = changegroup.readexactly(fh, 2)
        return changegroup.unbundle10(fh, alg)
    if version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?  (None until push() attempts it)
        self.locallocked = None
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None
79
79
def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        # local-to-local push: the destination must understand every
        # requirement of the source repo or data would be unreadable there
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push requires locking the remote repo directly
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                pushop.repo.prepushoutgoinghooks(pushop.repo,
                                                 pushop.remote,
                                                 pushop.outgoing)
                # bundle2 is experimental; use it only when explicitly
                # enabled locally *and* advertised by the server
                if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                              False)
                    and pushop.remote.capable('bundle2-exp')):
                    _pushbundle2(pushop)
                else:
                    _pushchangeset(pushop)
            # phase/obsmarker exchange runs even when no changesets moved
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    # bookmarks are synchronised outside the locks, after everything else
    _pushbookmark(pushop)
    return pushop.ret
153
153
def _pushdiscovery(pushop):
    """Run discovery against the remote and record the results on pushop."""
    unfi = pushop.repo.unfiltered()
    # what does the remote already have?
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    # what do we have that the remote lacks?
    pushop.outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
166
166
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set; return True when there is work to push.

    Aborts when the push would publish obsolete/troubled changesets or
    create unexpected remote heads (unless the push is forced).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    if pushop.force:
        # forced push skips all the sanity checks below
        return True
    # when the obsstore is empty there is nothing obsolete to refuse,
    # so we can skip iterating the heads entirely
    if unfi.obsstore:
        # messages kept in variables for the 80-char limit
        mso = _("push includes obsolete changeset: %s!")
        mst = "push includes %s changeset: %s!"
        # plain versions for i18n tool to detect them
        _("push includes unstable changeset: %s!")
        _("push includes bumped changeset: %s!")
        _("push includes divergent changeset: %s!")
        # If anything obsolete or unstable is in the missing set, at
        # least one of the missing heads is obsolete or unstable too,
        # so inspecting the heads only is sufficient.
        for node in outgoing.missingheads:
            ctx = unfi[node]
            if ctx.obsolete():
                raise util.Abort(mso % ctx)
            if ctx.troubled():
                raise util.Abort(_(mst) % (ctx.troubles()[0], ctx))
    newbm = pushop.ui.configlist('bookmarks', 'pushing')
    discovery.checkheads(unfi, pushop.remote, outgoing,
                         pushop.remoteheads, pushop.newbranch,
                         bool(pushop.incoming), newbm)
    return True
205
205
206 def _pushbundle2(pushop):
206 def _pushbundle2(pushop):
207 """push data to the remote using bundle2
207 """push data to the remote using bundle2
208
208
209 The only currently supported type of data is changegroup but this will
209 The only currently supported type of data is changegroup but this will
210 evolve in the future."""
210 evolve in the future."""
211 # Send known head to the server for race detection.
211 # Send known head to the server for race detection.
212 capsblob = urllib.unquote(pushop.remote.capable('bundle2-exp'))
212 capsblob = urllib.unquote(pushop.remote.capable('bundle2-exp'))
213 caps = bundle2.decodecaps(capsblob)
213 caps = bundle2.decodecaps(capsblob)
214 bundler = bundle2.bundle20(pushop.ui, caps)
214 bundler = bundle2.bundle20(pushop.ui, caps)
215 # create reply capability
215 # create reply capability
216 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
216 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
217 bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsblob))
217 bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsblob))
218 if not pushop.force:
218 if not pushop.force:
219 part = bundle2.bundlepart('B2X:CHECK:HEADS',
219 part = bundle2.bundlepart('B2X:CHECK:HEADS',
220 data=iter(pushop.remoteheads))
220 data=iter(pushop.remoteheads))
221 bundler.addpart(part)
221 bundler.addpart(part)
222 extrainfo = _pushbundle2extraparts(pushop, bundler)
222 extrainfo = _pushbundle2extraparts(pushop, bundler)
223 # add the changegroup bundle
223 # add the changegroup bundle
224 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
224 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
225 cgpart = bundle2.bundlepart('B2X:CHANGEGROUP', data=cg.getchunks())
225 cgpart = bundle2.bundlepart('B2X:CHANGEGROUP', data=cg.getchunks())
226 bundler.addpart(cgpart)
226 bundler.addpart(cgpart)
227 stream = util.chunkbuffer(bundler.getchunks())
227 stream = util.chunkbuffer(bundler.getchunks())
228 try:
228 try:
229 reply = pushop.remote.unbundle(stream, ['force'], 'push')
229 reply = pushop.remote.unbundle(stream, ['force'], 'push')
230 except bundle2.UnknownPartError, exc:
230 except bundle2.UnknownPartError, exc:
231 raise util.Abort('missing support for %s' % exc)
231 raise util.Abort('missing support for %s' % exc)
232 try:
232 try:
233 op = bundle2.processbundle(pushop.repo, reply)
233 op = bundle2.processbundle(pushop.repo, reply)
234 except bundle2.UnknownPartError, exc:
234 except bundle2.UnknownPartError, exc:
235 raise util.Abort('missing support for %s' % exc)
235 raise util.Abort('missing support for %s' % exc)
236 cgreplies = op.records.getreplies(cgpart.id)
236 cgreplies = op.records.getreplies(cgpart.id)
237 assert len(cgreplies['changegroup']) == 1
237 assert len(cgreplies['changegroup']) == 1
238 pushop.ret = cgreplies['changegroup'][0]['return']
238 pushop.ret = cgreplies['changegroup'][0]['return']
239 _pushbundle2extrareply(pushop, op, extrainfo)
239 _pushbundle2extrareply(pushop, op, extrainfo)
240
240
241 def _pushbundle2extraparts(pushop, bundler):
241 def _pushbundle2extraparts(pushop, bundler):
242 """hook function to let extensions add parts
242 """hook function to let extensions add parts
243
243
244 Return a dict to let extensions pass data to the reply processing.
244 Return a dict to let extensions pass data to the reply processing.
245 """
245 """
246 return {}
246 return {}
247
247
248 def _pushbundle2extrareply(pushop, op, extrainfo):
248 def _pushbundle2extrareply(pushop, op, extrainfo):
249 """hook function to let extensions react to part replies
249 """hook function to let extensions react to part replies
250
250
251 The dict from _pushbundle2extrareply is fed to this function.
251 The dict from _pushbundle2extrareply is fed to this function.
252 """
252 """
253 pass
253 pass
254
254
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    outgoing = pushop.outgoing
    remote = pushop.remote
    canunbundle = remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # Build the changegroup.  The fast path may only be taken for a full
    # push of a completely unfiltered repo: nothing excluded means no
    # race is possible on push.
    usefastpath = (pushop.revs is None
                   and not outgoing.excluded
                   and not pushop.repo.changelog.filteredrevs)
    if usefastpath:
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo, outgoing, bundler, 'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if canunbundle:
        # The local repo found the server's heads and computed the revs it
        # must push.  Once the revs are transferred, the server aborts if
        # it meanwhile grew different heads (someone else won a
        # commit/push race) — unless we force.
        remoteheads = ['force'] if pushop.force else pushop.remoteheads
        # ssh: returns remote's addchangegroup()
        # http: returns remote's addchangegroup() or 0 for error
        pushop.ret = remote.unbundle(cg, remoteheads, 'push')
    else:
        # we get back an integer indicating the remote head count change
        pushop.ret = remote.addchangegroup(cg, 'push', pushop.repo.url())
294
294
def _pushcomputecommonheads(pushop):
    """Derive the heads now common with the remote; store on pushop."""
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # the push went through: everything we sent is now common
        newcommon = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # a full push that failed: only what was common before remains so
        newcommon = pushop.outgoing.commonheads
    else:
        # Partial push.  We want:
        #   cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changesets filtered out)
        # which can be expressed as:
        #   (missingheads and ::commonheads)
        #   + (commonheads and ::missingheads)
        # Discovery already computed:
        #   common = ::commonheads
        #   missing = (commonheads::missingheads) - commonheads
        # So first take the pushed revs that are part of common ...
        knownset = set(pushop.outgoing.common)
        nodemap = pushop.repo.changelog.nodemap
        newcommon = [n for n in pushop.revs if nodemap[n] in knownset]
        # ... then add the common heads that are parents of missing roots
        matched = unfi.set('%ln and parents(roots(%ln))',
                           pushop.outgoing.commonheads,
                           pushop.outgoing.missing)
        newcommon.extend(ctx.node() for ctx in matched)
    pushop.commonheads = newcommon
328
328
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Uses ``pushop.commonheads`` (always filled in by
    ``_pushcomputecommonheads``, which ``push()`` runs immediately before
    this function) to decide which local changesets to publish and which
    remote heads to move to public.  The verbatim recomputation of those
    common heads that used to open this function duplicated
    ``_pushcomputecommonheads`` and has been removed.
    """
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # remote publishes everything: all common heads become public
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote but public here.
        # XXX Beware that revset break if droots is not strictly
        # XXX root we may want to ensure it is but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
408
408
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
        return
    # The repo is not locked, so we must not change any phases!
    # Still inform the user that phases would have been moved when
    # applicable.
    wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
    phasestr = phases.phasenames[phase]
    if wouldmove:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
422
422
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    # only exchange markers when the feature is enabled, we have markers
    # to send, and the remote speaks the obsolete namespace
    if not (obsolete._enabled and repo.obsstore and
            'obsolete' in remote.listkeys('namespaces')):
        return
    markers = repo.listkeys('obsolete')
    results = []
    # iterate in reverse sorted order so that we end with dump0
    for key in sorted(markers, reverse=True):
        results.append(remote.pushkey('obsolete', key, '', markers[key]))
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
439
439
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    comparison = bookmarks.compare(repo, repo._bookmarks,
                                   remote.listkeys('bookmarks'), srchex=hex)
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid) = comparison
    # only bookmarks that moved forward locally are pushed
    for name, localcid, remotecid in advsrc:
        # when revs were given, skip bookmarks outside the pushed subset
        if ancestors and repo[localcid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', name, remotecid, localcid):
            ui.status(_("updating bookmark %s\n") % name)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % name)
459
459
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new pulloperation should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revisions we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction (URL embedded for identification)
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created (lazily, see gettransaction)
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # set of steps remaining to do (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible,
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset,
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
526
526
def pull(repo, remote, heads=None, force=False):
    """Pull changesets (and phases/obsolete markers) from ``remote``.

    :repo: local repository pulled into
    :remote: peer to pull from
    :heads: revisions to pull (None means everything)
    :force: allow pulling unrelated repositories

    Returns the changegroup application result (``pullop.cgresult``),
    used as the command return code.  Raises util.Abort when the (local)
    remote requires features the destination does not support.
    """
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        # bundle2 pull path is experimental and opt-in on both sides
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        # each _pull* step removes itself from todosteps; a bundle2 pull may
        # already have handled some of them
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        # Ensure the repo lock is released even when rolling back the
        # transaction raises; the previous code could leak the lock here.
        try:
            pullop.releasetransaction()
        finally:
            lock.release()

    return pullop.cgresult
555
555
def _pulldiscovery(pullop):
    """Run the discovery phase of the pull.

    Currently handles changeset discovery only; this will change to handle
    all discovery at some point.
    """
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(), pullop.remote, heads=pullop.heads,
        force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
566
566
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    # advertise HG2X support plus our own bundle2 capabilities blob
    kwargs = {'bundlecaps': set(['HG2X'])}
    capsblob = bundle2.encodecaps(pullop.repo.bundle2caps)
    kwargs['bundlecaps'].add('bundle2=' + urllib.quote(capsblob))
    # pulling changegroup
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        kwargs['common'] = pullop.common
        kwargs['heads'] = pullop.heads or pullop.rheads
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    # let extensions add extra arguments before issuing the request
    _pullbundle2extraprepare(pullop, kwargs)
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except bundle2.UnknownPartError, exc:
        # remote sent a part we cannot process: abort rather than apply a
        # partial bundle
        raise util.Abort('missing support for %s' % exc)
    # exactly one changegroup part is expected in the reply for now
    assert len(op.records['changegroup']) == 1
    pullop.cgresult = op.records['changegroup'][0]['return']
594
594
595 def _pullbundle2extraprepare(pullop, kwargs):
595 def _pullbundle2extraprepare(pullop, kwargs):
596 """hook function so that extensions can extend the getbundle call"""
596 """hook function so that extensions can extend the getbundle call"""
597 pass
597 pass
598
598
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        # discovery found nothing to fetch; report and use 0 as result code
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the fetching method based on the remote's capabilities, from
    # most to least capable
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # full pull against an old remote
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
630
630
def _pullphase(pullop):
    """Synchronize local phase boundaries with the phase data from remote."""
    # Get remote phases data from remote
    pullop.todosteps.remove('phases')
    remotephases = pullop.remote.listkeys('phases')
    publishing = bool(remotephases.get('publishing', False))
    subset = pullop.pulledsubset
    if not remotephases or publishing:
        # The remote is either too old to exchange phase data or publishes
        # everything: all common changesets should be seen as public.
        phases.advanceboundary(pullop.repo, phases.public, subset)
    else:
        # The remote is phase-aware and non-publishing.
        pheads, _dr = phases.analyzeremotephases(pullop.repo, subset,
                                                 remotephases)
        phases.advanceboundary(pullop.repo, phases.public, pheads)
        phases.advanceboundary(pullop.repo, phases.draft, subset)
649
649
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    ``pullop.gettransaction`` is a function that returns the pull
    transaction, creating one if necessary. We return the transaction to
    inform the calling code that a new transaction has been created (when
    applicable).

    Exists mostly to allow overriding for experimentation purposes."""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # 'dump0' is always present when the remote has any markers
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    # marker payloads are transferred base85-encoded
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            # new markers may change which changesets are hidden/obsolete
            pullop.repo.invalidatevolatilesets()
    return tr
671
671
def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
              **kwargs):
    """return a full bundle (with potentially multiple kinds of parts)

    Could be a bundle HG10 or a bundle HG2X depending on the bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types will be available for bundle2.

    This is different from changegroup.getbundle that only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when
    we have a clearer idea of the API we want to use to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    # without HG2X in bundlecaps the client only understands plain HG10
    if bundlecaps is None or 'HG2X' not in bundlecaps:
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    # decode the client's bundle2 capabilities from the 'bundle2=' blob
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urllib.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)
    part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
    bundler.addpart(part)
    # let extensions contribute additional parts
    _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
                        bundlecaps=bundlecaps, **kwargs)
    return util.chunkbuffer(bundler.getchunks())
705
705
706 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
706 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
707 bundlecaps=None, **kwargs):
707 bundlecaps=None, **kwargs):
708 """hook function to let extensions add parts to the requested bundle"""
708 """hook function to let extensions add parts to the requested bundle"""
709 pass
709 pass
710
710
def check_heads(repo, their_heads, context):
    """Verify the repository heads still match what the peer observed.

    Used by peer for unbundling. Raises error.PushRaced when the heads
    changed between the peer's observation and now.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    # the peer may send the literal head list, a hash of it, or 'force'
    if their_heads == ['force']:
        return
    if their_heads == heads:
        return
    if their_heads == ['hashed', heads_hash]:
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
724
724
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    tr = None
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # a 'params' attribute marks a bundle2 stream
            try:
                tr = repo.transaction('unbundle')
                tr.hookargs['bundle2-exp'] = '1'
                r = bundle2.processbundle(repo, cg, lambda: tr).reply
                # expose pending (not yet committed) changelog data to the
                # pre-close hook so it can inspect the incoming changesets
                cl = repo.unfiltered().changelog
                p = cl.writepending() and repo.root or ""
                repo.hook('b2x-pretransactionclose', throw=True, source=source,
                          url=url, pending=p, **tr.hookargs)
                tr.close()
                repo.hook('b2x-transactionclose', source=source, url=url,
                          **tr.hookargs)
            except Exception, exc:
                # tag the exception so callers know it happened during a
                # bundle2 application
                exc.duringunbundle2 = True
                raise
        else:
            # plain HG10 changegroup
            r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        if tr is not None:
            tr.release()
        lock.release()
    return r
General Comments 0
You need to be logged in to leave comments. Login now