pull: prevent duplicated entry in `op.pulledsubset`...
Pierre-Yves David
r20878:09e71187 default
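
The change itself is small: `pulledsubset` used to build its result as `self.common + self.rheads`, so any node present in both lists appeared twice. A minimal stand-alone sketch of the old and new behaviour (the node ids below are made up for illustration only):

# Toy node lists; a remote head that is already common sits in both.
common = ['n1', 'n2']
rheads = ['n2', 'n3']

old = common + rheads          # ['n1', 'n2', 'n2', 'n3'] -- 'n2' duplicated

# The patched property keeps the order but skips nodes already in common:
c = set(common)
new = list(common)
for n in rheads:
    if n not in c:
        new.append(n)
print(new)                     # ['n1', 'n2', 'n3']
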
@@ -1,530 +1,535 @@
# exchange.py - utility to exchange data between repos.
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
from node import hex, nullid
import errno
import util, scmutil, changegroup, base85
import discovery, phases, obsolete, bookmarks


class pushoperation(object):
    """An object that represents a single push operation

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None

def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop.force, pushop.revs)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                _pushchangeset(pushop)
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    _pushbookmark(pushop)
    return pushop.ret

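For orientation, a hedged sketch of how a caller might interpret the integer documented in the docstring above; the `repo` and `remote` objects are assumed to be the usual local repository and peer, and the messages are illustrative only:

ret = push(repo, remote, force=False, revs=None, newbranch=False)
if ret is None:
    repo.ui.status('nothing to push\n')
elif ret == 0:
    repo.ui.warn('push failed (HTTP error)\n')
else:
    # 1: pushed, remote head count unchanged (or outgoing refused);
    # other values come from addchangegroup()
    repo.ui.status('push completed with code %r\n' % ret)
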
def _pushdiscovery(pushop):
    # discovery
    unfi = pushop.repo.unfiltered()
    fci = discovery.findcommonincoming
    commoninc = fci(unfi, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc

def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are here for the 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are to push and there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missing heads will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True

def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = pushop.repo._changegroupsubset(outgoing,
                                            bundler,
                                            'push',
                                            fastpath=True)
    else:
        cg = pushop.repo.getlocalbundle('push', outgoing, bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            'push')
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push',
                                                  pushop.repo.url())

def _pushcomputecommonheads(pushop):
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeeded, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All-out push failed; synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads)
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    if pushop.ret:
        # push succeeded, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All-out push failed; synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads)
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote supports phases
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs draft on remote but public here.
        # XXX Beware that revset breaks if droots is not strictly
        # XXX root we may want to ensure it is but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)

    for b, scid, dcid in advsrc:
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)

class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
-            return self.common + self.rheads
+            c = set(self.common)
+            ret = list(self.common)
+            for n in self.rheads:
+                if n not in c:
+                    ret.append(n)
+            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

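A sketch of the life cycle these three transaction helpers are designed for; it mirrors the structure of `pull()` just below, with `repo` and `remote` assumed to be supplied by the caller:

pullop = pulloperation(repo, remote)
try:
    tr = pullop.gettransaction()   # opened lazily, cached on the operation
    # ... changegroup, phase and obsolescence data would be applied here ...
    pullop.closetransaction()      # commits the transaction if one was opened
finally:
    pullop.releasetransaction()    # aborts it if it was never closed
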
def pull(repo, remote, heads=None, force=False):
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
                                           pullop.remote,
                                           heads=pullop.heads,
                                           force=force)
        pullop.common, pullop.fetch, pullop.rheads = tmp
        if not pullop.fetch:
            pullop.repo.ui.status(_("no changes found\n"))
            result = 0
        else:
            result = _pullchangeset(pullop)

        _pullphase(pullop)
        _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return result

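A matching hedged sketch for the pull side; `repo` and `remote` are again assumed to be a local repository and a peer, and the status messages are illustrative only:

result = pull(repo, remote, heads=None, force=False)
if result == 0:
    repo.ui.status('nothing was fetched\n')   # discovery found no changes
else:
    repo.ui.status('addchangegroup() returned %r\n' % result)
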
def _pullchangeset(pullop):
    """pull changesets from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open a transaction for nothing, and don't break future useful
    # rollback calls
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    return pullop.repo.addchangegroup(cg, 'pull', pullop.remote.url())

def _pullphase(pullop):
    # Get remote phases data from remote
    remotephases = pullop.remote.listkeys('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        phases.advanceboundary(pullop.repo, phases.public, pheads)
        phases.advanceboundary(pullop.repo, phases.draft,
                               pullop.pulledsubset)
    else:
        # Remote is old or publishing; all common changesets
        # should be seen as public
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)

def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    `gettransaction` is a function that returns the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code
    that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes"""
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr

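The docstring of `_pullobsolete` notes that the function exists mostly so it can be overridden for experimentation. A hedged sketch of what such an override could look like from an extension, using the standard `extensions.wrapfunction` helper (the wrapper body is purely illustrative):

from mercurial import exchange, extensions

def _noisypullobsolete(orig, pullop):
    # Illustrative wrapper: log, then defer to the original implementation.
    pullop.repo.ui.debug('experiment: wrapping obsolete marker pull\n')
    return orig(pullop)

def extsetup(ui):
    extensions.wrapfunction(exchange, '_pullobsolete', _noisypullobsolete)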