##// END OF EJS Templates
pull: add source information to the transaction...
Pierre-Yves David -
r22972:44b16b59 default
parent child Browse files
Show More
@@ -1,1257 +1,1259 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13
13
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of a bundle stream and return a suitable unpacker.

    Reads the first four bytes of ``fh`` to identify the bundle magic and
    version. Returns a ``changegroup.cg1unpacker`` for HG10 bundles or a
    ``bundle2.unbundle20`` for HG2X bundles; aborts on anything else.
    A headerless stream (starting with NUL) is assumed to be uncompressed
    HG10 data.
    """
    header = changegroup.readexactly(fh, 4)

    compression = None
    if not fname:
        fname = "stream"
    if not header.startswith('HG') and header.startswith('\0'):
        # headerless bundle: splice the consumed bytes back and treat it
        # as uncompressed HG10 data
        fh = changegroup.headerlessfixup(fh, header)
        header = "HG10"
        compression = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if compression is None:
            # compression type follows the four header bytes
            compression = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compression)
    if version == '2X':
        return bundle2.unbundle20(ui, fh, header=magic + version)
    raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
def buildobsmarkerspart(bundler, markers):
    """Add an obsmarker part carrying ``markers`` to ``bundler``.

    Returns the new part, or None when ``markers`` is empty (no part is
    created in that case).
    Raises ValueError if the bundler doesn't support any known obsmarker
    format.
    """
    if not markers:
        return None
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler do not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('B2X:OBSMARKERS', data=stream)
54
54
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed (names requested on the command line)
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # steps already performed
        # (used to check what steps have been already performed through
        # bundle2, so the legacy code paths can skip them)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, as (name, old-remote-id, new-remote-id) tuples
        self.outbookmarks = []

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(self.outgoing.common)
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message pairs (success, failure) used when pushing a bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
161
161
162
162
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=()):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote.

    Returns the pushoperation object used for this push (note: not an
    integer; the integer changegroup result is carried in
    ``pushop.cgresult``):
    - None means nothing to push
    - 0 means HTTP error
    - 1 means we pushed and remote head count is unchanged *or*
      we have outgoing changesets but refused to push
    - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks)
    if pushop.remote.local():
        # pushing over the local filesystem: the destination must understand
        # every repository requirement of the source
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push: we must lock the remote repository ourselves
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
                                          False)
                and pushop.remote.capable('bundle2-exp')):
                _pushbundle2(pushop)
            # the steps below mark themselves done in pushop.stepsdone, so
            # anything already handled by bundle2 above is skipped
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    return pushop
230
230
# Ordered list of step names used to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
238
238
def pushdiscovery(stepname):
    """Decorator registering a pre-push discovery step under ``stepname``.

    The decorated function is stored in the step -> function mapping and
    ``stepname`` is appended to the ordered step list, so decoration order
    is execution order (this may matter).

    Only usable for brand new steps; to wrap a step from an extension,
    change the pushdiscovery dictionary directly."""
    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
254
254
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
260
260
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """Discover the changesets that need to be pushed.

    Fills in ``pushop.outgoing``, ``pushop.remoteheads`` and
    ``pushop.incoming`` from common/outgoing discovery against the remote.
    """
    unfi = pushop.repo.unfiltered()
    commoninc = discovery.findcommonincoming(unfi, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    outgoing = discovery.findcommonoutgoing(unfi, pushop.remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
274
274
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Fills in ``pushop.outdatedphases`` (heads to turn public if the
    changeset push succeeds) and ``pushop.fallbackoutdatedphases`` (heads
    to turn public even if it fails)."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    # NOTE: pheads (public heads) is computed but unused here
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # on a non-publishing remote, only consider what is public locally
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
309
309
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """Collect the obsolescence markers relevant to the pushed heads."""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    # only query the remote's namespaces once exchange is enabled locally
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
320
320
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmarks that need to be pushed

    Compares local and remote bookmarks, restricted to the pushed revs when
    any, and fills ``pushop.outbookmarks`` with (name, old-remote-id,
    new-remote-id) tuples. Sets ``pushop.bkresult`` to 2 when an explicitly
    requested bookmark exists nowhere."""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # limit bookmark updates to ancestors of the pushed revisions
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks the user explicitly asked to push; names are removed from
    # this set as they are matched, leftovers are reported as unknown
    explicit = set(pushop.bookmarks)

    comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
    # bookmarks that moved forward locally: plain update
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
            # '' as old id means "create on remote"
            pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in advdst + diverge + differ:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, ''))

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
367
367
def _pushcheckoutgoing(pushop):
    """Validate the outgoing changesets before pushing them.

    Returns False when there is nothing to push. Unless --force is in
    effect, aborts on obsolete/troubled outgoing heads and runs the
    standard new-head checks. Returns True when the push should proceed.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(mst[ctx.troubles()[0]] % ctx)
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True
402
402
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
410
410
def b2partsgenerator(stepname):
    """Decorator registering a bundle2 part generator under ``stepname``.

    The decorated function is stored in the step -> function mapping and
    ``stepname`` is appended to the ordered step list, so decoration order
    is generation order (this may matter).

    Only usable for brand new steps; to wrap a step from an extension,
    change the b2partsgenmapping dictionary directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        b2partsgenorder.append(stepname)
        return func
    return register
426
426
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    Marks the 'changesets' step done so the legacy push path skips it.
    Returns a reply handler extracting the result from the server reply.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    if not pushop.force:
        # server aborts if its heads changed since discovery
        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
    cg = changegroup.getlocalchangegroup(pushop.repo, 'push', pushop.outgoing)
    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
    def handlereply(op):
        """extract addchangroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
452
452
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    One ``b2x:pushkey`` part is added per outdated remote head that must
    be turned public.  The returned ``handlereply`` closure reads the
    server replies and warns about any head the server refused to update.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # use 'not in' for readability and consistency with _pushb2bookmarks
    if 'b2x:pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []
    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            # the server sends at most one reply per pushkey part
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
484
484
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """Add an obsolescence-markers part to a bundle2 push, when possible."""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no marker format in common with the remote: nothing we can send
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, pushop.outobsmarkers)
495
495
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    One ``b2x:pushkey`` part is added per outgoing bookmark.  The returned
    ``handlereply`` closure reports each update's outcome to the user and
    records the overall result in ``pushop.bkresult``.
    """
    # NOTE: the previous docstring said "phase push" - a copy-paste error
    # from _pushb2phases; this function handles bookmarks.
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'b2x:pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode
    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('b2x:pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            # the server sends at most one reply per pushkey part
            assert len(results) <= 1
            if not results:
                # use the 'ui' local consistently (was pushop.ui.warn)
                ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
538
538
539
539
540 def _pushbundle2(pushop):
540 def _pushbundle2(pushop):
541 """push data to the remote using bundle2
541 """push data to the remote using bundle2
542
542
543 The only currently supported type of data is changegroup but this will
543 The only currently supported type of data is changegroup but this will
544 evolve in the future."""
544 evolve in the future."""
545 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
545 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
546 # create reply capability
546 # create reply capability
547 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
547 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
548 bundler.newpart('b2x:replycaps', data=capsblob)
548 bundler.newpart('b2x:replycaps', data=capsblob)
549 replyhandlers = []
549 replyhandlers = []
550 for partgenname in b2partsgenorder:
550 for partgenname in b2partsgenorder:
551 partgen = b2partsgenmapping[partgenname]
551 partgen = b2partsgenmapping[partgenname]
552 ret = partgen(pushop, bundler)
552 ret = partgen(pushop, bundler)
553 if callable(ret):
553 if callable(ret):
554 replyhandlers.append(ret)
554 replyhandlers.append(ret)
555 # do not push if nothing to push
555 # do not push if nothing to push
556 if bundler.nbparts <= 1:
556 if bundler.nbparts <= 1:
557 return
557 return
558 stream = util.chunkbuffer(bundler.getchunks())
558 stream = util.chunkbuffer(bundler.getchunks())
559 try:
559 try:
560 reply = pushop.remote.unbundle(stream, ['force'], 'push')
560 reply = pushop.remote.unbundle(stream, ['force'], 'push')
561 except error.BundleValueError, exc:
561 except error.BundleValueError, exc:
562 raise util.Abort('missing support for %s' % exc)
562 raise util.Abort('missing support for %s' % exc)
563 try:
563 try:
564 op = bundle2.processbundle(pushop.repo, reply)
564 op = bundle2.processbundle(pushop.repo, reply)
565 except error.BundleValueError, exc:
565 except error.BundleValueError, exc:
566 raise util.Abort('missing support for %s' % exc)
566 raise util.Abort('missing support for %s' % exc)
567 for rephand in replyhandlers:
567 for rephand in replyhandlers:
568 rephand(op)
568 rephand(op)
569
569
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop.repo,
                                     pushop.remote,
                                     pushop.outgoing)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    pushall = (pushop.revs is None
               and not outgoing.excluded
               and not pushop.repo.changelog.filteredrevs)
    if pushall:
        # push everything -
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
618
618
619 def _pushsyncphase(pushop):
619 def _pushsyncphase(pushop):
620 """synchronise phase information locally and remotely"""
620 """synchronise phase information locally and remotely"""
621 cheads = pushop.commonheads
621 cheads = pushop.commonheads
622 # even when we don't push, exchanging phase data is useful
622 # even when we don't push, exchanging phase data is useful
623 remotephases = pushop.remote.listkeys('phases')
623 remotephases = pushop.remote.listkeys('phases')
624 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
624 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
625 and remotephases # server supports phases
625 and remotephases # server supports phases
626 and pushop.cgresult is None # nothing was pushed
626 and pushop.cgresult is None # nothing was pushed
627 and remotephases.get('publishing', False)):
627 and remotephases.get('publishing', False)):
628 # When:
628 # When:
629 # - this is a subrepo push
629 # - this is a subrepo push
630 # - and remote support phase
630 # - and remote support phase
631 # - and no changeset was pushed
631 # - and no changeset was pushed
632 # - and remote is publishing
632 # - and remote is publishing
633 # We may be in issue 3871 case!
633 # We may be in issue 3871 case!
634 # We drop the possible phase synchronisation done by
634 # We drop the possible phase synchronisation done by
635 # courtesy to publish changesets possibly locally draft
635 # courtesy to publish changesets possibly locally draft
636 # on the remote.
636 # on the remote.
637 remotephases = {'publishing': 'True'}
637 remotephases = {'publishing': 'True'}
638 if not remotephases: # old server or public only reply from non-publishing
638 if not remotephases: # old server or public only reply from non-publishing
639 _localphasemove(pushop, cheads)
639 _localphasemove(pushop, cheads)
640 # don't push any phase data as there is nothing to push
640 # don't push any phase data as there is nothing to push
641 else:
641 else:
642 ana = phases.analyzeremotephases(pushop.repo, cheads,
642 ana = phases.analyzeremotephases(pushop.repo, cheads,
643 remotephases)
643 remotephases)
644 pheads, droots = ana
644 pheads, droots = ana
645 ### Apply remote phase on local
645 ### Apply remote phase on local
646 if remotephases.get('publishing', False):
646 if remotephases.get('publishing', False):
647 _localphasemove(pushop, cheads)
647 _localphasemove(pushop, cheads)
648 else: # publish = False
648 else: # publish = False
649 _localphasemove(pushop, pheads)
649 _localphasemove(pushop, pheads)
650 _localphasemove(pushop, cheads, phases.draft)
650 _localphasemove(pushop, cheads, phases.draft)
651 ### Apply local phase on remote
651 ### Apply local phase on remote
652
652
653 if pushop.cgresult:
653 if pushop.cgresult:
654 if 'phases' in pushop.stepsdone:
654 if 'phases' in pushop.stepsdone:
655 # phases already pushed though bundle2
655 # phases already pushed though bundle2
656 return
656 return
657 outdated = pushop.outdatedphases
657 outdated = pushop.outdatedphases
658 else:
658 else:
659 outdated = pushop.fallbackoutdatedphases
659 outdated = pushop.fallbackoutdatedphases
660
660
661 pushop.stepsdone.add('phases')
661 pushop.stepsdone.add('phases')
662
662
663 # filter heads already turned public by the push
663 # filter heads already turned public by the push
664 outdated = [c for c in outdated if c.node() not in pheads]
664 outdated = [c for c in outdated if c.node() not in pheads]
665 b2caps = bundle2.bundle2caps(pushop.remote)
665 b2caps = bundle2.bundle2caps(pushop.remote)
666 if 'b2x:pushkey' in b2caps:
666 if 'b2x:pushkey' in b2caps:
667 # server supports bundle2, let's do a batched push through it
667 # server supports bundle2, let's do a batched push through it
668 #
668 #
669 # This will eventually be unified with the changesets bundle2 push
669 # This will eventually be unified with the changesets bundle2 push
670 bundler = bundle2.bundle20(pushop.ui, b2caps)
670 bundler = bundle2.bundle20(pushop.ui, b2caps)
671 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
671 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
672 bundler.newpart('b2x:replycaps', data=capsblob)
672 bundler.newpart('b2x:replycaps', data=capsblob)
673 part2node = []
673 part2node = []
674 enc = pushkey.encode
674 enc = pushkey.encode
675 for newremotehead in outdated:
675 for newremotehead in outdated:
676 part = bundler.newpart('b2x:pushkey')
676 part = bundler.newpart('b2x:pushkey')
677 part.addparam('namespace', enc('phases'))
677 part.addparam('namespace', enc('phases'))
678 part.addparam('key', enc(newremotehead.hex()))
678 part.addparam('key', enc(newremotehead.hex()))
679 part.addparam('old', enc(str(phases.draft)))
679 part.addparam('old', enc(str(phases.draft)))
680 part.addparam('new', enc(str(phases.public)))
680 part.addparam('new', enc(str(phases.public)))
681 part2node.append((part.id, newremotehead))
681 part2node.append((part.id, newremotehead))
682 stream = util.chunkbuffer(bundler.getchunks())
682 stream = util.chunkbuffer(bundler.getchunks())
683 try:
683 try:
684 reply = pushop.remote.unbundle(stream, ['force'], 'push')
684 reply = pushop.remote.unbundle(stream, ['force'], 'push')
685 op = bundle2.processbundle(pushop.repo, reply)
685 op = bundle2.processbundle(pushop.repo, reply)
686 except error.BundleValueError, exc:
686 except error.BundleValueError, exc:
687 raise util.Abort('missing support for %s' % exc)
687 raise util.Abort('missing support for %s' % exc)
688 for partid, node in part2node:
688 for partid, node in part2node:
689 partrep = op.records.getreplies(partid)
689 partrep = op.records.getreplies(partid)
690 results = partrep['pushkey']
690 results = partrep['pushkey']
691 assert len(results) <= 1
691 assert len(results) <= 1
692 msg = None
692 msg = None
693 if not results:
693 if not results:
694 msg = _('server ignored update of %s to public!\n') % node
694 msg = _('server ignored update of %s to public!\n') % node
695 elif not int(results[0]['return']):
695 elif not int(results[0]['return']):
696 msg = _('updating %s to public failed!\n') % node
696 msg = _('updating %s to public failed!\n') % node
697 if msg is not None:
697 if msg is not None:
698 pushop.ui.warn(msg)
698 pushop.ui.warn(msg)
699
699
700 else:
700 else:
701 # fallback to independant pushkey command
701 # fallback to independant pushkey command
702 for newremotehead in outdated:
702 for newremotehead in outdated:
703 r = pushop.remote.pushkey('phases',
703 r = pushop.remote.pushkey('phases',
704 newremotehead.hex(),
704 newremotehead.hex(),
705 str(phases.draft),
705 str(phases.draft),
706 str(phases.public))
706 str(phases.public))
707 if not r:
707 if not r:
708 pushop.ui.warn(_('updating %s to public failed!\n')
708 pushop.ui.warn(_('updating %s to public failed!\n')
709 % newremotehead)
709 % newremotehead)
710
710
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.locallocked:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
        return
    tr = pushop.repo.transaction('push-phase-sync')
    try:
        phases.advanceboundary(pushop.repo, tr, phase, nodes)
        tr.close()
    finally:
        tr.release()
729
729
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        results = []
        remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
        # reverse sort to ensure we end with dump0
        for key in sorted(remotedata, reverse=True):
            results.append(remote.pushkey('obsolete', key, '',
                                          remotedata[key]))
        if not all(results):
            repo.ui.warn(_('failed to push some obsolete markers!\n'))
748
748
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # pick the message set matching the kind of change
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
770
770
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=()):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
        self.force = force
        # the name the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        if self.heads is not None:
            # an explicit subset was requested: sync on exactly that subset
            return self.heads
        # everything was pulled: sync on all common heads plus any remote
        # head not already known to be common
        known = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in known)
        return subset

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
            # record where the data comes from so hooks can know about it
            self._tr.hookargs['source'] = 'pull'
            self._tr.hookargs['url'] = self.remote.url()
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
841
843
def pull(repo, remote, heads=None, force=False, bookmarks=()):
    """Pull changes from ``remote`` into ``repo`` and return the operation."""
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks)
    if pullop.remote.local():
        # local peer: make sure we can actually read what it stores
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    pullop.remotebookmarks = remote.listkeys('bookmarks')
    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
            and pullop.remote.capable('bundle2-exp')):
            _pullbundle2(pullop)
        # the non-bundle2 steps below are no-ops for anything bundle2
        # already handled (tracked through pullop.stepsdone)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop
869
871
# ordered list of step names for discovery performed before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The decorated function is stored in the step -> function mapping and its
    name appended to the ordered step list, so decoration order matters.

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def dec(func):
        # each step may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec
893
895
def _pulldiscovery(pullop):
    """Run all discovery steps, in registration order"""
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
899
901
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo.unfiltered(),
        pullop.remote,
        heads=pullop.heads,
        force=pullop.force)
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
911
913
912 def _pullbundle2(pullop):
914 def _pullbundle2(pullop):
913 """pull data using bundle2
915 """pull data using bundle2
914
916
915 For now, the only supported data are changegroup."""
917 For now, the only supported data are changegroup."""
916 remotecaps = bundle2.bundle2caps(pullop.remote)
918 remotecaps = bundle2.bundle2caps(pullop.remote)
917 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
919 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
918 # pulling changegroup
920 # pulling changegroup
919 pullop.stepsdone.add('changegroup')
921 pullop.stepsdone.add('changegroup')
920
922
921 kwargs['common'] = pullop.common
923 kwargs['common'] = pullop.common
922 kwargs['heads'] = pullop.heads or pullop.rheads
924 kwargs['heads'] = pullop.heads or pullop.rheads
923 kwargs['cg'] = pullop.fetch
925 kwargs['cg'] = pullop.fetch
924 if 'b2x:listkeys' in remotecaps:
926 if 'b2x:listkeys' in remotecaps:
925 kwargs['listkeys'] = ['phase', 'bookmarks']
927 kwargs['listkeys'] = ['phase', 'bookmarks']
926 if not pullop.fetch:
928 if not pullop.fetch:
927 pullop.repo.ui.status(_("no changes found\n"))
929 pullop.repo.ui.status(_("no changes found\n"))
928 pullop.cgresult = 0
930 pullop.cgresult = 0
929 else:
931 else:
930 if pullop.heads is None and list(pullop.common) == [nullid]:
932 if pullop.heads is None and list(pullop.common) == [nullid]:
931 pullop.repo.ui.status(_("requesting all changes\n"))
933 pullop.repo.ui.status(_("requesting all changes\n"))
932 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
934 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
933 remoteversions = bundle2.obsmarkersversion(remotecaps)
935 remoteversions = bundle2.obsmarkersversion(remotecaps)
934 if obsolete.commonversion(remoteversions) is not None:
936 if obsolete.commonversion(remoteversions) is not None:
935 kwargs['obsmarkers'] = True
937 kwargs['obsmarkers'] = True
936 pullop.stepsdone.add('obsmarkers')
938 pullop.stepsdone.add('obsmarkers')
937 _pullbundle2extraprepare(pullop, kwargs)
939 _pullbundle2extraprepare(pullop, kwargs)
938 if kwargs.keys() == ['format']:
940 if kwargs.keys() == ['format']:
939 return # nothing to pull
941 return # nothing to pull
940 bundle = pullop.remote.getbundle('pull', **kwargs)
942 bundle = pullop.remote.getbundle('pull', **kwargs)
941 try:
943 try:
942 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
944 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
943 except error.BundleValueError, exc:
945 except error.BundleValueError, exc:
944 raise util.Abort('missing support for %s' % exc)
946 raise util.Abort('missing support for %s' % exc)
945
947
946 if pullop.fetch:
948 if pullop.fetch:
947 changedheads = 0
949 changedheads = 0
948 pullop.cgresult = 1
950 pullop.cgresult = 1
949 for cg in op.records['changegroup']:
951 for cg in op.records['changegroup']:
950 ret = cg['return']
952 ret = cg['return']
951 # If any changegroup result is 0, return 0
953 # If any changegroup result is 0, return 0
952 if ret == 0:
954 if ret == 0:
953 pullop.cgresult = 0
955 pullop.cgresult = 0
954 break
956 break
955 if ret < -1:
957 if ret < -1:
956 changedheads += ret + 1
958 changedheads += ret + 1
957 elif ret > 1:
959 elif ret > 1:
958 changedheads += ret - 1
960 changedheads += ret - 1
959 if changedheads > 0:
961 if changedheads > 0:
960 pullop.cgresult = 1 + changedheads
962 pullop.cgresult = 1 + changedheads
961 elif changedheads < 0:
963 elif changedheads < 0:
962 pullop.cgresult = -1 + changedheads
964 pullop.cgresult = -1 + changedheads
963
965
964 # processing phases change
966 # processing phases change
965 for namespace, value in op.records['listkeys']:
967 for namespace, value in op.records['listkeys']:
966 if namespace == 'phases':
968 if namespace == 'phases':
967 _pullapplyphases(pullop, value)
969 _pullapplyphases(pullop, value)
968
970
969 # processing bookmark update
971 # processing bookmark update
970 for namespace, value in op.records['listkeys']:
972 for namespace, value in op.records['listkeys']:
971 if namespace == 'bookmarks':
973 if namespace == 'bookmarks':
972 pullop.remotebookmarks = value
974 pullop.remotebookmarks = value
973 _pullbookmarks(pullop)
975 _pullbookmarks(pullop)
974
976
975 def _pullbundle2extraprepare(pullop, kwargs):
977 def _pullbundle2extraprepare(pullop, kwargs):
976 """hook function so that extensions can extend the getbundle call"""
978 """hook function so that extensions can extend the getbundle call"""
977 pass
979 pass
978
980
979 def _pullchangeset(pullop):
981 def _pullchangeset(pullop):
980 """pull changeset from unbundle into the local repo"""
982 """pull changeset from unbundle into the local repo"""
981 # We delay the open of the transaction as late as possible so we
983 # We delay the open of the transaction as late as possible so we
982 # don't open transaction for nothing or you break future useful
984 # don't open transaction for nothing or you break future useful
983 # rollback call
985 # rollback call
984 if 'changegroup' in pullop.stepsdone:
986 if 'changegroup' in pullop.stepsdone:
985 return
987 return
986 pullop.stepsdone.add('changegroup')
988 pullop.stepsdone.add('changegroup')
987 if not pullop.fetch:
989 if not pullop.fetch:
988 pullop.repo.ui.status(_("no changes found\n"))
990 pullop.repo.ui.status(_("no changes found\n"))
989 pullop.cgresult = 0
991 pullop.cgresult = 0
990 return
992 return
991 pullop.gettransaction()
993 pullop.gettransaction()
992 if pullop.heads is None and list(pullop.common) == [nullid]:
994 if pullop.heads is None and list(pullop.common) == [nullid]:
993 pullop.repo.ui.status(_("requesting all changes\n"))
995 pullop.repo.ui.status(_("requesting all changes\n"))
994 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
996 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
995 # issue1320, avoid a race if remote changed after discovery
997 # issue1320, avoid a race if remote changed after discovery
996 pullop.heads = pullop.rheads
998 pullop.heads = pullop.rheads
997
999
998 if pullop.remote.capable('getbundle'):
1000 if pullop.remote.capable('getbundle'):
999 # TODO: get bundlecaps from remote
1001 # TODO: get bundlecaps from remote
1000 cg = pullop.remote.getbundle('pull', common=pullop.common,
1002 cg = pullop.remote.getbundle('pull', common=pullop.common,
1001 heads=pullop.heads or pullop.rheads)
1003 heads=pullop.heads or pullop.rheads)
1002 elif pullop.heads is None:
1004 elif pullop.heads is None:
1003 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1005 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1004 elif not pullop.remote.capable('changegroupsubset'):
1006 elif not pullop.remote.capable('changegroupsubset'):
1005 raise util.Abort(_("partial pull cannot be done because "
1007 raise util.Abort(_("partial pull cannot be done because "
1006 "other repository doesn't support "
1008 "other repository doesn't support "
1007 "changegroupsubset."))
1009 "changegroupsubset."))
1008 else:
1010 else:
1009 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1011 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1010 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
1012 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
1011 pullop.remote.url())
1013 pullop.remote.url())
1012
1014
1013 def _pullphase(pullop):
1015 def _pullphase(pullop):
1014 # Get remote phases data from remote
1016 # Get remote phases data from remote
1015 if 'phases' in pullop.stepsdone:
1017 if 'phases' in pullop.stepsdone:
1016 return
1018 return
1017 remotephases = pullop.remote.listkeys('phases')
1019 remotephases = pullop.remote.listkeys('phases')
1018 _pullapplyphases(pullop, remotephases)
1020 _pullapplyphases(pullop, remotephases)
1019
1021
1020 def _pullapplyphases(pullop, remotephases):
1022 def _pullapplyphases(pullop, remotephases):
1021 """apply phase movement from observed remote state"""
1023 """apply phase movement from observed remote state"""
1022 if 'phases' in pullop.stepsdone:
1024 if 'phases' in pullop.stepsdone:
1023 return
1025 return
1024 pullop.stepsdone.add('phases')
1026 pullop.stepsdone.add('phases')
1025 publishing = bool(remotephases.get('publishing', False))
1027 publishing = bool(remotephases.get('publishing', False))
1026 if remotephases and not publishing:
1028 if remotephases and not publishing:
1027 # remote is new and unpublishing
1029 # remote is new and unpublishing
1028 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1030 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1029 pullop.pulledsubset,
1031 pullop.pulledsubset,
1030 remotephases)
1032 remotephases)
1031 dheads = pullop.pulledsubset
1033 dheads = pullop.pulledsubset
1032 else:
1034 else:
1033 # Remote is old or publishing all common changesets
1035 # Remote is old or publishing all common changesets
1034 # should be seen as public
1036 # should be seen as public
1035 pheads = pullop.pulledsubset
1037 pheads = pullop.pulledsubset
1036 dheads = []
1038 dheads = []
1037 unfi = pullop.repo.unfiltered()
1039 unfi = pullop.repo.unfiltered()
1038 phase = unfi._phasecache.phase
1040 phase = unfi._phasecache.phase
1039 rev = unfi.changelog.nodemap.get
1041 rev = unfi.changelog.nodemap.get
1040 public = phases.public
1042 public = phases.public
1041 draft = phases.draft
1043 draft = phases.draft
1042
1044
1043 # exclude changesets already public locally and update the others
1045 # exclude changesets already public locally and update the others
1044 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1046 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1045 if pheads:
1047 if pheads:
1046 tr = pullop.gettransaction()
1048 tr = pullop.gettransaction()
1047 phases.advanceboundary(pullop.repo, tr, public, pheads)
1049 phases.advanceboundary(pullop.repo, tr, public, pheads)
1048
1050
1049 # exclude changesets already draft locally and update the others
1051 # exclude changesets already draft locally and update the others
1050 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1052 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1051 if dheads:
1053 if dheads:
1052 tr = pullop.gettransaction()
1054 tr = pullop.gettransaction()
1053 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1055 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1054
1056
1055 def _pullbookmarks(pullop):
1057 def _pullbookmarks(pullop):
1056 """process the remote bookmark information to update the local one"""
1058 """process the remote bookmark information to update the local one"""
1057 if 'bookmarks' in pullop.stepsdone:
1059 if 'bookmarks' in pullop.stepsdone:
1058 return
1060 return
1059 pullop.stepsdone.add('bookmarks')
1061 pullop.stepsdone.add('bookmarks')
1060 repo = pullop.repo
1062 repo = pullop.repo
1061 remotebookmarks = pullop.remotebookmarks
1063 remotebookmarks = pullop.remotebookmarks
1062 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1064 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1063 pullop.remote.url(),
1065 pullop.remote.url(),
1064 pullop.gettransaction,
1066 pullop.gettransaction,
1065 explicit=pullop.explicitbookmarks)
1067 explicit=pullop.explicitbookmarks)
1066
1068
1067 def _pullobsolete(pullop):
1069 def _pullobsolete(pullop):
1068 """utility function to pull obsolete markers from a remote
1070 """utility function to pull obsolete markers from a remote
1069
1071
1070 The `gettransaction` is function that return the pull transaction, creating
1072 The `gettransaction` is function that return the pull transaction, creating
1071 one if necessary. We return the transaction to inform the calling code that
1073 one if necessary. We return the transaction to inform the calling code that
1072 a new transaction have been created (when applicable).
1074 a new transaction have been created (when applicable).
1073
1075
1074 Exists mostly to allow overriding for experimentation purpose"""
1076 Exists mostly to allow overriding for experimentation purpose"""
1075 if 'obsmarkers' in pullop.stepsdone:
1077 if 'obsmarkers' in pullop.stepsdone:
1076 return
1078 return
1077 pullop.stepsdone.add('obsmarkers')
1079 pullop.stepsdone.add('obsmarkers')
1078 tr = None
1080 tr = None
1079 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1081 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1080 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1082 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1081 remoteobs = pullop.remote.listkeys('obsolete')
1083 remoteobs = pullop.remote.listkeys('obsolete')
1082 if 'dump0' in remoteobs:
1084 if 'dump0' in remoteobs:
1083 tr = pullop.gettransaction()
1085 tr = pullop.gettransaction()
1084 for key in sorted(remoteobs, reverse=True):
1086 for key in sorted(remoteobs, reverse=True):
1085 if key.startswith('dump'):
1087 if key.startswith('dump'):
1086 data = base85.b85decode(remoteobs[key])
1088 data = base85.b85decode(remoteobs[key])
1087 pullop.repo.obsstore.mergemarkers(tr, data)
1089 pullop.repo.obsstore.mergemarkers(tr, data)
1088 pullop.repo.invalidatevolatilesets()
1090 pullop.repo.invalidatevolatilesets()
1089 return tr
1091 return tr
1090
1092
1091 def caps20to10(repo):
1093 def caps20to10(repo):
1092 """return a set with appropriate options to use bundle20 during getbundle"""
1094 """return a set with appropriate options to use bundle20 during getbundle"""
1093 caps = set(['HG2X'])
1095 caps = set(['HG2X'])
1094 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1096 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1095 caps.add('bundle2=' + urllib.quote(capsblob))
1097 caps.add('bundle2=' + urllib.quote(capsblob))
1096 return caps
1098 return caps
1097
1099
1098 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1100 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1099 getbundle2partsorder = []
1101 getbundle2partsorder = []
1100
1102
1101 # Mapping between step name and function
1103 # Mapping between step name and function
1102 #
1104 #
1103 # This exists to help extensions wrap steps if necessary
1105 # This exists to help extensions wrap steps if necessary
1104 getbundle2partsmapping = {}
1106 getbundle2partsmapping = {}
1105
1107
1106 def getbundle2partsgenerator(stepname):
1108 def getbundle2partsgenerator(stepname):
1107 """decorator for function generating bundle2 part for getbundle
1109 """decorator for function generating bundle2 part for getbundle
1108
1110
1109 The function is added to the step -> function mapping and appended to the
1111 The function is added to the step -> function mapping and appended to the
1110 list of steps. Beware that decorated functions will be added in order
1112 list of steps. Beware that decorated functions will be added in order
1111 (this may matter).
1113 (this may matter).
1112
1114
1113 You can only use this decorator for new steps, if you want to wrap a step
1115 You can only use this decorator for new steps, if you want to wrap a step
1114 from an extension, attack the getbundle2partsmapping dictionary directly."""
1116 from an extension, attack the getbundle2partsmapping dictionary directly."""
1115 def dec(func):
1117 def dec(func):
1116 assert stepname not in getbundle2partsmapping
1118 assert stepname not in getbundle2partsmapping
1117 getbundle2partsmapping[stepname] = func
1119 getbundle2partsmapping[stepname] = func
1118 getbundle2partsorder.append(stepname)
1120 getbundle2partsorder.append(stepname)
1119 return func
1121 return func
1120 return dec
1122 return dec
1121
1123
1122 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1124 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1123 **kwargs):
1125 **kwargs):
1124 """return a full bundle (with potentially multiple kind of parts)
1126 """return a full bundle (with potentially multiple kind of parts)
1125
1127
1126 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
1128 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
1127 passed. For now, the bundle can contain only changegroup, but this will
1129 passed. For now, the bundle can contain only changegroup, but this will
1128 changes when more part type will be available for bundle2.
1130 changes when more part type will be available for bundle2.
1129
1131
1130 This is different from changegroup.getchangegroup that only returns an HG10
1132 This is different from changegroup.getchangegroup that only returns an HG10
1131 changegroup bundle. They may eventually get reunited in the future when we
1133 changegroup bundle. They may eventually get reunited in the future when we
1132 have a clearer idea of the API we what to query different data.
1134 have a clearer idea of the API we what to query different data.
1133
1135
1134 The implementation is at a very early stage and will get massive rework
1136 The implementation is at a very early stage and will get massive rework
1135 when the API of bundle is refined.
1137 when the API of bundle is refined.
1136 """
1138 """
1137 # bundle10 case
1139 # bundle10 case
1138 if bundlecaps is None or 'HG2X' not in bundlecaps:
1140 if bundlecaps is None or 'HG2X' not in bundlecaps:
1139 if bundlecaps and not kwargs.get('cg', True):
1141 if bundlecaps and not kwargs.get('cg', True):
1140 raise ValueError(_('request for bundle10 must include changegroup'))
1142 raise ValueError(_('request for bundle10 must include changegroup'))
1141
1143
1142 if kwargs:
1144 if kwargs:
1143 raise ValueError(_('unsupported getbundle arguments: %s')
1145 raise ValueError(_('unsupported getbundle arguments: %s')
1144 % ', '.join(sorted(kwargs.keys())))
1146 % ', '.join(sorted(kwargs.keys())))
1145 return changegroup.getchangegroup(repo, source, heads=heads,
1147 return changegroup.getchangegroup(repo, source, heads=heads,
1146 common=common, bundlecaps=bundlecaps)
1148 common=common, bundlecaps=bundlecaps)
1147
1149
1148 # bundle20 case
1150 # bundle20 case
1149 b2caps = {}
1151 b2caps = {}
1150 for bcaps in bundlecaps:
1152 for bcaps in bundlecaps:
1151 if bcaps.startswith('bundle2='):
1153 if bcaps.startswith('bundle2='):
1152 blob = urllib.unquote(bcaps[len('bundle2='):])
1154 blob = urllib.unquote(bcaps[len('bundle2='):])
1153 b2caps.update(bundle2.decodecaps(blob))
1155 b2caps.update(bundle2.decodecaps(blob))
1154 bundler = bundle2.bundle20(repo.ui, b2caps)
1156 bundler = bundle2.bundle20(repo.ui, b2caps)
1155
1157
1156 for name in getbundle2partsorder:
1158 for name in getbundle2partsorder:
1157 func = getbundle2partsmapping[name]
1159 func = getbundle2partsmapping[name]
1158 kwargs['heads'] = heads
1160 kwargs['heads'] = heads
1159 kwargs['common'] = common
1161 kwargs['common'] = common
1160 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1162 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1161 **kwargs)
1163 **kwargs)
1162
1164
1163 return util.chunkbuffer(bundler.getchunks())
1165 return util.chunkbuffer(bundler.getchunks())
1164
1166
1165 @getbundle2partsgenerator('changegroup')
1167 @getbundle2partsgenerator('changegroup')
1166 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1168 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1167 b2caps=None, heads=None, common=None, **kwargs):
1169 b2caps=None, heads=None, common=None, **kwargs):
1168 """add a changegroup part to the requested bundle"""
1170 """add a changegroup part to the requested bundle"""
1169 cg = None
1171 cg = None
1170 if kwargs.get('cg', True):
1172 if kwargs.get('cg', True):
1171 # build changegroup bundle here.
1173 # build changegroup bundle here.
1172 cg = changegroup.getchangegroup(repo, source, heads=heads,
1174 cg = changegroup.getchangegroup(repo, source, heads=heads,
1173 common=common, bundlecaps=bundlecaps)
1175 common=common, bundlecaps=bundlecaps)
1174
1176
1175 if cg:
1177 if cg:
1176 bundler.newpart('b2x:changegroup', data=cg.getchunks())
1178 bundler.newpart('b2x:changegroup', data=cg.getchunks())
1177
1179
1178 @getbundle2partsgenerator('listkeys')
1180 @getbundle2partsgenerator('listkeys')
1179 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1181 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1180 b2caps=None, **kwargs):
1182 b2caps=None, **kwargs):
1181 """add parts containing listkeys namespaces to the requested bundle"""
1183 """add parts containing listkeys namespaces to the requested bundle"""
1182 listkeys = kwargs.get('listkeys', ())
1184 listkeys = kwargs.get('listkeys', ())
1183 for namespace in listkeys:
1185 for namespace in listkeys:
1184 part = bundler.newpart('b2x:listkeys')
1186 part = bundler.newpart('b2x:listkeys')
1185 part.addparam('namespace', namespace)
1187 part.addparam('namespace', namespace)
1186 keys = repo.listkeys(namespace).items()
1188 keys = repo.listkeys(namespace).items()
1187 part.data = pushkey.encodekeys(keys)
1189 part.data = pushkey.encodekeys(keys)
1188
1190
1189 @getbundle2partsgenerator('obsmarkers')
1191 @getbundle2partsgenerator('obsmarkers')
1190 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1192 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1191 b2caps=None, heads=None, **kwargs):
1193 b2caps=None, heads=None, **kwargs):
1192 """add an obsolescence markers part to the requested bundle"""
1194 """add an obsolescence markers part to the requested bundle"""
1193 if kwargs.get('obsmarkers', False):
1195 if kwargs.get('obsmarkers', False):
1194 if heads is None:
1196 if heads is None:
1195 heads = repo.heads()
1197 heads = repo.heads()
1196 subset = [c.node() for c in repo.set('::%ln', heads)]
1198 subset = [c.node() for c in repo.set('::%ln', heads)]
1197 markers = repo.obsstore.relevantmarkers(subset)
1199 markers = repo.obsstore.relevantmarkers(subset)
1198 buildobsmarkerspart(bundler, markers)
1200 buildobsmarkerspart(bundler, markers)
1199
1201
1200 @getbundle2partsgenerator('extra')
1202 @getbundle2partsgenerator('extra')
1201 def _getbundleextrapart(bundler, repo, source, bundlecaps=None,
1203 def _getbundleextrapart(bundler, repo, source, bundlecaps=None,
1202 b2caps=None, **kwargs):
1204 b2caps=None, **kwargs):
1203 """hook function to let extensions add parts to the requested bundle"""
1205 """hook function to let extensions add parts to the requested bundle"""
1204 pass
1206 pass
1205
1207
1206 def check_heads(repo, their_heads, context):
1208 def check_heads(repo, their_heads, context):
1207 """check if the heads of a repo have been modified
1209 """check if the heads of a repo have been modified
1208
1210
1209 Used by peer for unbundling.
1211 Used by peer for unbundling.
1210 """
1212 """
1211 heads = repo.heads()
1213 heads = repo.heads()
1212 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1214 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1213 if not (their_heads == ['force'] or their_heads == heads or
1215 if not (their_heads == ['force'] or their_heads == heads or
1214 their_heads == ['hashed', heads_hash]):
1216 their_heads == ['hashed', heads_hash]):
1215 # someone else committed/pushed/unbundled while we
1217 # someone else committed/pushed/unbundled while we
1216 # were transferring data
1218 # were transferring data
1217 raise error.PushRaced('repository changed while %s - '
1219 raise error.PushRaced('repository changed while %s - '
1218 'please try again' % context)
1220 'please try again' % context)
1219
1221
1220 def unbundle(repo, cg, heads, source, url):
1222 def unbundle(repo, cg, heads, source, url):
1221 """Apply a bundle to a repo.
1223 """Apply a bundle to a repo.
1222
1224
1223 this function makes sure the repo is locked during the application and have
1225 this function makes sure the repo is locked during the application and have
1224 mechanism to check that no push race occurred between the creation of the
1226 mechanism to check that no push race occurred between the creation of the
1225 bundle and its application.
1227 bundle and its application.
1226
1228
1227 If the push was raced as PushRaced exception is raised."""
1229 If the push was raced as PushRaced exception is raised."""
1228 r = 0
1230 r = 0
1229 # need a transaction when processing a bundle2 stream
1231 # need a transaction when processing a bundle2 stream
1230 tr = None
1232 tr = None
1231 lock = repo.lock()
1233 lock = repo.lock()
1232 try:
1234 try:
1233 check_heads(repo, heads, 'uploading changes')
1235 check_heads(repo, heads, 'uploading changes')
1234 # push can proceed
1236 # push can proceed
1235 if util.safehasattr(cg, 'params'):
1237 if util.safehasattr(cg, 'params'):
1236 try:
1238 try:
1237 tr = repo.transaction('unbundle')
1239 tr = repo.transaction('unbundle')
1238 tr.hookargs['source'] = source
1240 tr.hookargs['source'] = source
1239 tr.hookargs['url'] = url
1241 tr.hookargs['url'] = url
1240 tr.hookargs['bundle2-exp'] = '1'
1242 tr.hookargs['bundle2-exp'] = '1'
1241 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1243 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1242 cl = repo.unfiltered().changelog
1244 cl = repo.unfiltered().changelog
1243 p = cl.writepending() and repo.root or ""
1245 p = cl.writepending() and repo.root or ""
1244 repo.hook('b2x-pretransactionclose', throw=True, pending=p,
1246 repo.hook('b2x-pretransactionclose', throw=True, pending=p,
1245 **tr.hookargs)
1247 **tr.hookargs)
1246 tr.close()
1248 tr.close()
1247 repo.hook('b2x-transactionclose', **tr.hookargs)
1249 repo.hook('b2x-transactionclose', **tr.hookargs)
1248 except Exception, exc:
1250 except Exception, exc:
1249 exc.duringunbundle2 = True
1251 exc.duringunbundle2 = True
1250 raise
1252 raise
1251 else:
1253 else:
1252 r = changegroup.addchangegroup(repo, cg, source, url)
1254 r = changegroup.addchangegroup(repo, cg, source, url)
1253 finally:
1255 finally:
1254 if tr is not None:
1256 if tr is not None:
1255 tr.release()
1257 tr.release()
1256 lock.release()
1258 lock.release()
1257 return r
1259 return r
@@ -1,478 +1,478 b''
1 Test exchange of common information using bundle2
1 Test exchange of common information using bundle2
2
2
3
3
4 $ getmainid() {
4 $ getmainid() {
5 > hg -R main log --template '{node}\n' --rev "$1"
5 > hg -R main log --template '{node}\n' --rev "$1"
6 > }
6 > }
7
7
8 enable obsolescence
8 enable obsolescence
9
9
10 $ cat >> $HGRCPATH << EOF
10 $ cat >> $HGRCPATH << EOF
11 > [experimental]
11 > [experimental]
12 > evolution=createmarkers,exchange
12 > evolution=createmarkers,exchange
13 > bundle2-exp=True
13 > bundle2-exp=True
14 > [ui]
14 > [ui]
15 > ssh=python "$TESTDIR/dummyssh"
15 > ssh=python "$TESTDIR/dummyssh"
16 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
16 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
17 > [web]
17 > [web]
18 > push_ssl = false
18 > push_ssl = false
19 > allow_push = *
19 > allow_push = *
20 > [phases]
20 > [phases]
21 > publish=False
21 > publish=False
22 > [hooks]
22 > [hooks]
23 > changegroup = sh -c "HG_LOCAL= python \"$TESTDIR/printenv.py\" changegroup"
23 > changegroup = sh -c "HG_LOCAL= python \"$TESTDIR/printenv.py\" changegroup"
24 > b2x-transactionclose = sh -c "HG_LOCAL= python \"$TESTDIR/printenv.py\" b2x-transactionclose"
24 > b2x-transactionclose = sh -c "HG_LOCAL= python \"$TESTDIR/printenv.py\" b2x-transactionclose"
25 > EOF
25 > EOF
26
26
27 The extension requires a repo (currently unused)
27 The extension requires a repo (currently unused)
28
28
29 $ hg init main
29 $ hg init main
30 $ cd main
30 $ cd main
31 $ touch a
31 $ touch a
32 $ hg add a
32 $ hg add a
33 $ hg commit -m 'a'
33 $ hg commit -m 'a'
34
34
35 $ hg unbundle $TESTDIR/bundles/rebase.hg
35 $ hg unbundle $TESTDIR/bundles/rebase.hg
36 adding changesets
36 adding changesets
37 adding manifests
37 adding manifests
38 adding file changes
38 adding file changes
39 added 8 changesets with 7 changes to 7 files (+3 heads)
39 added 8 changesets with 7 changes to 7 files (+3 heads)
40 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=unbundle HG_URL=bundle:*/rebase.hg (glob)
40 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=unbundle HG_URL=bundle:*/rebase.hg (glob)
41 (run 'hg heads' to see heads, 'hg merge' to merge)
41 (run 'hg heads' to see heads, 'hg merge' to merge)
42
42
43 $ cd ..
43 $ cd ..
44
44
45 Real world exchange
45 Real world exchange
46 =====================
46 =====================
47
47
48 Add more obsolescence information
48 Add more obsolescence information
49
49
50 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
50 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
51 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
51 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
52
52
53 clone --pull
53 clone --pull
54
54
55 $ hg -R main phase --public cd010b8cd998
55 $ hg -R main phase --public cd010b8cd998
56 $ hg clone main other --pull --rev 9520eea781bc
56 $ hg clone main other --pull --rev 9520eea781bc
57 adding changesets
57 adding changesets
58 adding manifests
58 adding manifests
59 adding file changes
59 adding file changes
60 added 2 changesets with 2 changes to 2 files
60 added 2 changesets with 2 changes to 2 files
61 1 new obsolescence markers
61 1 new obsolescence markers
62 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=bundle2 HG_URL=bundle2
62 changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
63 updating to branch default
63 updating to branch default
64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
64 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
65 $ hg -R other log -G
65 $ hg -R other log -G
66 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
66 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
67 |
67 |
68 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
68 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
69
69
70 $ hg -R other debugobsolete
70 $ hg -R other debugobsolete
71 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
71 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
72
72
73 pull
73 pull
74
74
75 $ hg -R main phase --public 9520eea781bc
75 $ hg -R main phase --public 9520eea781bc
76 $ hg -R other pull -r 24b6387c8c8c
76 $ hg -R other pull -r 24b6387c8c8c
77 pulling from $TESTTMP/main (glob)
77 pulling from $TESTTMP/main (glob)
78 searching for changes
78 searching for changes
79 adding changesets
79 adding changesets
80 adding manifests
80 adding manifests
81 adding file changes
81 adding file changes
82 added 1 changesets with 1 changes to 1 files (+1 heads)
82 added 1 changesets with 1 changes to 1 files (+1 heads)
83 1 new obsolescence markers
83 1 new obsolescence markers
84 changegroup hook: HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_SOURCE=bundle2 HG_URL=bundle2
84 changegroup hook: HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
85 (run 'hg heads' to see heads, 'hg merge' to merge)
85 (run 'hg heads' to see heads, 'hg merge' to merge)
86 $ hg -R other log -G
86 $ hg -R other log -G
87 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
87 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
88 |
88 |
89 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
89 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
90 |/
90 |/
91 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
91 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
92
92
93 $ hg -R other debugobsolete
93 $ hg -R other debugobsolete
94 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
94 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
95 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
95 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
96
96
97 pull empty (with phase movement)
97 pull empty (with phase movement)
98
98
99 $ hg -R main phase --public 24b6387c8c8c
99 $ hg -R main phase --public 24b6387c8c8c
100 $ hg -R other pull -r 24b6387c8c8c
100 $ hg -R other pull -r 24b6387c8c8c
101 pulling from $TESTTMP/main (glob)
101 pulling from $TESTTMP/main (glob)
102 no changes found
102 no changes found
103 $ hg -R other log -G
103 $ hg -R other log -G
104 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
104 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
105 |
105 |
106 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
106 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
107 |/
107 |/
108 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
108 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
109
109
110 $ hg -R other debugobsolete
110 $ hg -R other debugobsolete
111 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
111 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
112 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
112 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
113
113
114 pull empty
114 pull empty
115
115
116 $ hg -R other pull -r 24b6387c8c8c
116 $ hg -R other pull -r 24b6387c8c8c
117 pulling from $TESTTMP/main (glob)
117 pulling from $TESTTMP/main (glob)
118 no changes found
118 no changes found
119 $ hg -R other log -G
119 $ hg -R other log -G
120 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
120 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
121 |
121 |
122 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
122 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
123 |/
123 |/
124 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
124 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
125
125
126 $ hg -R other debugobsolete
126 $ hg -R other debugobsolete
127 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
129
129
130 add extra data to test their exchange during push
130 add extra data to test their exchange during push
131
131
132 $ hg -R main bookmark --rev eea13746799a book_eea1
132 $ hg -R main bookmark --rev eea13746799a book_eea1
133 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
133 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
134 $ hg -R main bookmark --rev 02de42196ebe book_02de
134 $ hg -R main bookmark --rev 02de42196ebe book_02de
135 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
135 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
136 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
136 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
137 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
137 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
138 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
138 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
139 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
139 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
140 $ hg -R main bookmark --rev 32af7686d403 book_32af
140 $ hg -R main bookmark --rev 32af7686d403 book_32af
141 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
141 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
142
142
143 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
143 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
144 $ hg -R other bookmark --rev cd010b8cd998 book_02de
144 $ hg -R other bookmark --rev cd010b8cd998 book_02de
145 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
145 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
146 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
146 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
147 $ hg -R other bookmark --rev cd010b8cd998 book_32af
147 $ hg -R other bookmark --rev cd010b8cd998 book_32af
148
148
149 $ hg -R main phase --public eea13746799a
149 $ hg -R main phase --public eea13746799a
150
150
151 push
151 push
152 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
152 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
153 pushing to other
153 pushing to other
154 searching for changes
154 searching for changes
155 b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_URL=push
155 b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_URL=push
156 changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_SOURCE=push HG_URL=push
156 changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_SOURCE=push HG_URL=push
157 remote: adding changesets
157 remote: adding changesets
158 remote: adding manifests
158 remote: adding manifests
159 remote: adding file changes
159 remote: adding file changes
160 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
160 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
161 remote: 1 new obsolescence markers
161 remote: 1 new obsolescence markers
162 updating bookmark book_eea1
162 updating bookmark book_eea1
163 $ hg -R other log -G
163 $ hg -R other log -G
164 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
164 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
165 |\
165 |\
166 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
166 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
167 | |
167 | |
168 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
168 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
169 |/
169 |/
170 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
170 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
171
171
172 $ hg -R other debugobsolete
172 $ hg -R other debugobsolete
173 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
173 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
174 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
174 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
175 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
175 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
176
176
177 pull over ssh
177 pull over ssh
178
178
179 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
179 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
180 pulling from ssh://user@dummy/main
180 pulling from ssh://user@dummy/main
181 searching for changes
181 searching for changes
182 adding changesets
182 adding changesets
183 adding manifests
183 adding manifests
184 adding file changes
184 adding file changes
185 added 1 changesets with 1 changes to 1 files (+1 heads)
185 added 1 changesets with 1 changes to 1 files (+1 heads)
186 1 new obsolescence markers
186 1 new obsolescence markers
187 updating bookmark book_02de
187 updating bookmark book_02de
188 changegroup hook: HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_SOURCE=bundle2 HG_URL=bundle2
188 changegroup hook: HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
189 (run 'hg heads' to see heads, 'hg merge' to merge)
189 (run 'hg heads' to see heads, 'hg merge' to merge)
190 $ hg -R other debugobsolete
190 $ hg -R other debugobsolete
191 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
191 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
192 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
192 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
193 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
193 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
194 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
194 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
195
195
196 pull over http
196 pull over http
197
197
198 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
198 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
199 $ cat main.pid >> $DAEMON_PIDS
199 $ cat main.pid >> $DAEMON_PIDS
200
200
201 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
201 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
202 pulling from http://localhost:$HGPORT/
202 pulling from http://localhost:$HGPORT/
203 searching for changes
203 searching for changes
204 adding changesets
204 adding changesets
205 adding manifests
205 adding manifests
206 adding file changes
206 adding file changes
207 added 1 changesets with 1 changes to 1 files (+1 heads)
207 added 1 changesets with 1 changes to 1 files (+1 heads)
208 1 new obsolescence markers
208 1 new obsolescence markers
209 updating bookmark book_42cc
209 updating bookmark book_42cc
210 changegroup hook: HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_SOURCE=bundle2 HG_URL=bundle2
210 changegroup hook: HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
211 (run 'hg heads .' to see heads, 'hg merge' to merge)
211 (run 'hg heads .' to see heads, 'hg merge' to merge)
212 $ cat main-error.log
212 $ cat main-error.log
213 $ hg -R other debugobsolete
213 $ hg -R other debugobsolete
214 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
214 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
215 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
215 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
216 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
216 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
217 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
217 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
218 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
218 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
219
219
220 push over ssh
220 push over ssh
221
221
222 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
222 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
223 pushing to ssh://user@dummy/other
223 pushing to ssh://user@dummy/other
224 searching for changes
224 searching for changes
225 remote: adding changesets
225 remote: adding changesets
226 remote: adding manifests
226 remote: adding manifests
227 remote: adding file changes
227 remote: adding file changes
228 remote: added 1 changesets with 1 changes to 1 files
228 remote: added 1 changesets with 1 changes to 1 files
229 remote: 1 new obsolescence markers
229 remote: 1 new obsolescence markers
230 updating bookmark book_5fdd
230 updating bookmark book_5fdd
231 remote: b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
231 remote: b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
232 remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
232 remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
233 $ hg -R other log -G
233 $ hg -R other log -G
234 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
234 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
235 |
235 |
236 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
236 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
237 |
237 |
238 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
238 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
239 | |
239 | |
240 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
240 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
241 | |/|
241 | |/|
242 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
242 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
243 |/ /
243 |/ /
244 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
244 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
245 |/
245 |/
246 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
246 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
247
247
248 $ hg -R other debugobsolete
248 $ hg -R other debugobsolete
249 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
249 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
250 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
250 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
251 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
251 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
252 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
252 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
253 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
253 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
254 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
254 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
255
255
256 push over http
256 push over http
257
257
258 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
258 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
259 $ cat other.pid >> $DAEMON_PIDS
259 $ cat other.pid >> $DAEMON_PIDS
260
260
261 $ hg -R main phase --public 32af7686d403
261 $ hg -R main phase --public 32af7686d403
262 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
262 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
263 pushing to http://localhost:$HGPORT2/
263 pushing to http://localhost:$HGPORT2/
264 searching for changes
264 searching for changes
265 remote: adding changesets
265 remote: adding changesets
266 remote: adding manifests
266 remote: adding manifests
267 remote: adding file changes
267 remote: adding file changes
268 remote: added 1 changesets with 1 changes to 1 files
268 remote: added 1 changesets with 1 changes to 1 files
269 remote: 1 new obsolescence markers
269 remote: 1 new obsolescence markers
270 updating bookmark book_32af
270 updating bookmark book_32af
271 $ cat other-error.log
271 $ cat other-error.log
272
272
273 Check final content.
273 Check final content.
274
274
275 $ hg -R other log -G
275 $ hg -R other log -G
276 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
276 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
277 |
277 |
278 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
278 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
279 |
279 |
280 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
280 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
281 |
281 |
282 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
282 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
283 | |
283 | |
284 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
284 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
285 | |/|
285 | |/|
286 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
286 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
287 |/ /
287 |/ /
288 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
288 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
289 |/
289 |/
290 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
290 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
291
291
292 $ hg -R other debugobsolete
292 $ hg -R other debugobsolete
293 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
293 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
294 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
294 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
295 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
295 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300
300
301 Error Handling
301 Error Handling
302 ==============
302 ==============
303
303
304 Check that errors are properly returned to the client during push.
304 Check that errors are properly returned to the client during push.
305
305
306 Setting up
306 Setting up
307
307
308 $ cat > failpush.py << EOF
308 $ cat > failpush.py << EOF
309 > """A small extension that makes push fails when using bundle2
309 > """A small extension that makes push fails when using bundle2
310 >
310 >
311 > used to test error handling in bundle2
311 > used to test error handling in bundle2
312 > """
312 > """
313 >
313 >
314 > from mercurial import util
314 > from mercurial import util
315 > from mercurial import bundle2
315 > from mercurial import bundle2
316 > from mercurial import exchange
316 > from mercurial import exchange
317 > from mercurial import extensions
317 > from mercurial import extensions
318 >
318 >
319 > def _pushbundle2failpart(pushop, bundler):
319 > def _pushbundle2failpart(pushop, bundler):
320 > reason = pushop.ui.config('failpush', 'reason', None)
320 > reason = pushop.ui.config('failpush', 'reason', None)
321 > part = None
321 > part = None
322 > if reason == 'abort':
322 > if reason == 'abort':
323 > bundler.newpart('test:abort')
323 > bundler.newpart('test:abort')
324 > if reason == 'unknown':
324 > if reason == 'unknown':
325 > bundler.newpart('TEST:UNKNOWN')
325 > bundler.newpart('TEST:UNKNOWN')
326 > if reason == 'race':
326 > if reason == 'race':
327 > # 20 Bytes of crap
327 > # 20 Bytes of crap
328 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
328 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
329 >
329 >
330 > @bundle2.parthandler("test:abort")
330 > @bundle2.parthandler("test:abort")
331 > def handleabort(op, part):
331 > def handleabort(op, part):
332 > raise util.Abort('Abandon ship!', hint="don't panic")
332 > raise util.Abort('Abandon ship!', hint="don't panic")
333 >
333 >
334 > def uisetup(ui):
334 > def uisetup(ui):
335 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
335 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
336 > exchange.b2partsgenorder.insert(0, 'failpart')
336 > exchange.b2partsgenorder.insert(0, 'failpart')
337 >
337 >
338 > EOF
338 > EOF
339
339
340 $ cd main
340 $ cd main
341 $ hg up tip
341 $ hg up tip
342 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
342 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
343 $ echo 'I' > I
343 $ echo 'I' > I
344 $ hg add I
344 $ hg add I
345 $ hg ci -m 'I'
345 $ hg ci -m 'I'
346 $ hg id
346 $ hg id
347 e7ec4e813ba6 tip
347 e7ec4e813ba6 tip
348 $ cd ..
348 $ cd ..
349
349
350 $ cat << EOF >> $HGRCPATH
350 $ cat << EOF >> $HGRCPATH
351 > [extensions]
351 > [extensions]
352 > failpush=$TESTTMP/failpush.py
352 > failpush=$TESTTMP/failpush.py
353 > EOF
353 > EOF
354
354
355 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
355 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
356 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
356 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
357 $ cat other.pid >> $DAEMON_PIDS
357 $ cat other.pid >> $DAEMON_PIDS
358
358
359 Doing the actual push: Abort error
359 Doing the actual push: Abort error
360
360
361 $ cat << EOF >> $HGRCPATH
361 $ cat << EOF >> $HGRCPATH
362 > [failpush]
362 > [failpush]
363 > reason = abort
363 > reason = abort
364 > EOF
364 > EOF
365
365
366 $ hg -R main push other -r e7ec4e813ba6
366 $ hg -R main push other -r e7ec4e813ba6
367 pushing to other
367 pushing to other
368 searching for changes
368 searching for changes
369 abort: Abandon ship!
369 abort: Abandon ship!
370 (don't panic)
370 (don't panic)
371 [255]
371 [255]
372
372
373 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
373 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
374 pushing to ssh://user@dummy/other
374 pushing to ssh://user@dummy/other
375 searching for changes
375 searching for changes
376 abort: Abandon ship!
376 abort: Abandon ship!
377 (don't panic)
377 (don't panic)
378 [255]
378 [255]
379
379
380 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
380 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
381 pushing to http://localhost:$HGPORT2/
381 pushing to http://localhost:$HGPORT2/
382 searching for changes
382 searching for changes
383 abort: Abandon ship!
383 abort: Abandon ship!
384 (don't panic)
384 (don't panic)
385 [255]
385 [255]
386
386
387
387
388 Doing the actual push: unknown mandatory parts
388 Doing the actual push: unknown mandatory parts
389
389
390 $ cat << EOF >> $HGRCPATH
390 $ cat << EOF >> $HGRCPATH
391 > [failpush]
391 > [failpush]
392 > reason = unknown
392 > reason = unknown
393 > EOF
393 > EOF
394
394
395 $ hg -R main push other -r e7ec4e813ba6
395 $ hg -R main push other -r e7ec4e813ba6
396 pushing to other
396 pushing to other
397 searching for changes
397 searching for changes
398 abort: missing support for test:unknown
398 abort: missing support for test:unknown
399 [255]
399 [255]
400
400
401 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
401 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
402 pushing to ssh://user@dummy/other
402 pushing to ssh://user@dummy/other
403 searching for changes
403 searching for changes
404 abort: missing support for test:unknown
404 abort: missing support for test:unknown
405 [255]
405 [255]
406
406
407 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
407 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
408 pushing to http://localhost:$HGPORT2/
408 pushing to http://localhost:$HGPORT2/
409 searching for changes
409 searching for changes
410 abort: missing support for test:unknown
410 abort: missing support for test:unknown
411 [255]
411 [255]
412
412
413 Doing the actual push: race
413 Doing the actual push: race
414
414
415 $ cat << EOF >> $HGRCPATH
415 $ cat << EOF >> $HGRCPATH
416 > [failpush]
416 > [failpush]
417 > reason = race
417 > reason = race
418 > EOF
418 > EOF
419
419
420 $ hg -R main push other -r e7ec4e813ba6
420 $ hg -R main push other -r e7ec4e813ba6
421 pushing to other
421 pushing to other
422 searching for changes
422 searching for changes
423 abort: push failed:
423 abort: push failed:
424 'repository changed while pushing - please try again'
424 'repository changed while pushing - please try again'
425 [255]
425 [255]
426
426
427 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
427 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
428 pushing to ssh://user@dummy/other
428 pushing to ssh://user@dummy/other
429 searching for changes
429 searching for changes
430 abort: push failed:
430 abort: push failed:
431 'repository changed while pushing - please try again'
431 'repository changed while pushing - please try again'
432 [255]
432 [255]
433
433
434 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
434 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
435 pushing to http://localhost:$HGPORT2/
435 pushing to http://localhost:$HGPORT2/
436 searching for changes
436 searching for changes
437 abort: push failed:
437 abort: push failed:
438 'repository changed while pushing - please try again'
438 'repository changed while pushing - please try again'
439 [255]
439 [255]
440
440
441 Doing the actual push: hook abort
441 Doing the actual push: hook abort
442
442
443 $ cat << EOF >> $HGRCPATH
443 $ cat << EOF >> $HGRCPATH
444 > [failpush]
444 > [failpush]
445 > reason =
445 > reason =
446 > [hooks]
446 > [hooks]
447 > b2x-pretransactionclose.failpush = false
447 > b2x-pretransactionclose.failpush = false
448 > EOF
448 > EOF
449
449
450 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
450 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
451 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
451 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
452 $ cat other.pid >> $DAEMON_PIDS
452 $ cat other.pid >> $DAEMON_PIDS
453
453
454 $ hg -R main push other -r e7ec4e813ba6
454 $ hg -R main push other -r e7ec4e813ba6
455 pushing to other
455 pushing to other
456 searching for changes
456 searching for changes
457 transaction abort!
457 transaction abort!
458 rollback completed
458 rollback completed
459 changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=push HG_URL=push
459 changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=push HG_URL=push
460 abort: b2x-pretransactionclose.failpush hook exited with status 1
460 abort: b2x-pretransactionclose.failpush hook exited with status 1
461 [255]
461 [255]
462
462
463 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
463 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
464 pushing to ssh://user@dummy/other
464 pushing to ssh://user@dummy/other
465 searching for changes
465 searching for changes
466 abort: b2x-pretransactionclose.failpush hook exited with status 1
466 abort: b2x-pretransactionclose.failpush hook exited with status 1
467 remote: transaction abort!
467 remote: transaction abort!
468 remote: rollback completed
468 remote: rollback completed
469 remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
469 remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
470 [255]
470 [255]
471
471
472 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
472 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
473 pushing to http://localhost:$HGPORT2/
473 pushing to http://localhost:$HGPORT2/
474 searching for changes
474 searching for changes
475 abort: b2x-pretransactionclose.failpush hook exited with status 1
475 abort: b2x-pretransactionclose.failpush hook exited with status 1
476 [255]
476 [255]
477
477
478
478
@@ -1,261 +1,261 b''
1 Create an extension to test bundle2 with multiple changegroups
1 Create an extension to test bundle2 with multiple changegroups
2
2
3 $ cat > bundle2.py <<EOF
3 $ cat > bundle2.py <<EOF
4 > """
4 > """
5 > """
5 > """
6 > from mercurial import changegroup, exchange
6 > from mercurial import changegroup, exchange
7 >
7 >
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
9 > b2caps=None, heads=None, common=None,
9 > b2caps=None, heads=None, common=None,
10 > **kwargs):
10 > **kwargs):
11 > # Create two changegroups given the common changesets and heads for the
11 > # Create two changegroups given the common changesets and heads for the
12 > # changegroup part we are being requested. Use the parent of each head
12 > # changegroup part we are being requested. Use the parent of each head
13 > # in 'heads' as intermediate heads for the first changegroup.
13 > # in 'heads' as intermediate heads for the first changegroup.
14 > intermediates = [repo[r].p1().node() for r in heads]
14 > intermediates = [repo[r].p1().node() for r in heads]
15 > cg = changegroup.getchangegroup(repo, source, heads=intermediates,
15 > cg = changegroup.getchangegroup(repo, source, heads=intermediates,
16 > common=common, bundlecaps=bundlecaps)
16 > common=common, bundlecaps=bundlecaps)
17 > bundler.newpart('b2x:output', data='changegroup1')
17 > bundler.newpart('b2x:output', data='changegroup1')
18 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
18 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
19 > cg = changegroup.getchangegroup(repo, source, heads=heads,
19 > cg = changegroup.getchangegroup(repo, source, heads=heads,
20 > common=common + intermediates,
20 > common=common + intermediates,
21 > bundlecaps=bundlecaps)
21 > bundlecaps=bundlecaps)
22 > bundler.newpart('b2x:output', data='changegroup2')
22 > bundler.newpart('b2x:output', data='changegroup2')
23 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
23 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
24 >
24 >
25 > def _pull(repo, *args, **kwargs):
25 > def _pull(repo, *args, **kwargs):
26 > pullop = _orig_pull(repo, *args, **kwargs)
26 > pullop = _orig_pull(repo, *args, **kwargs)
27 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
27 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
28 > return pullop
28 > return pullop
29 >
29 >
30 > _orig_pull = exchange.pull
30 > _orig_pull = exchange.pull
31 > exchange.pull = _pull
31 > exchange.pull = _pull
32 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
32 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
33 > EOF
33 > EOF
34
34
35 $ cat >> $HGRCPATH << EOF
35 $ cat >> $HGRCPATH << EOF
36 > [experimental]
36 > [experimental]
37 > bundle2-exp=True
37 > bundle2-exp=True
38 > [ui]
38 > [ui]
39 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
39 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
40 > EOF
40 > EOF
41
41
42 Start with a simple repository with a single commit
42 Start with a simple repository with a single commit
43
43
44 $ hg init repo
44 $ hg init repo
45 $ cd repo
45 $ cd repo
46 $ cat > .hg/hgrc << EOF
46 $ cat > .hg/hgrc << EOF
47 > [extensions]
47 > [extensions]
48 > bundle2=$TESTTMP/bundle2.py
48 > bundle2=$TESTTMP/bundle2.py
49 > EOF
49 > EOF
50
50
51 $ echo A > A
51 $ echo A > A
52 $ hg commit -A -m A -q
52 $ hg commit -A -m A -q
53 $ cd ..
53 $ cd ..
54
54
55 Clone
55 Clone
56
56
57 $ hg clone -q repo clone
57 $ hg clone -q repo clone
58
58
59 Add two linear commits
59 Add two linear commits
60
60
61 $ cd repo
61 $ cd repo
62 $ echo B > B
62 $ echo B > B
63 $ hg commit -A -m B -q
63 $ hg commit -A -m B -q
64 $ echo C > C
64 $ echo C > C
65 $ hg commit -A -m C -q
65 $ hg commit -A -m C -q
66
66
67 $ cd ../clone
67 $ cd ../clone
68 $ cat >> .hg/hgrc <<EOF
68 $ cat >> .hg/hgrc <<EOF
69 > [hooks]
69 > [hooks]
70 > pretxnchangegroup = sh -c "python \"$TESTDIR/printenv.py\" pretxnchangegroup"
70 > pretxnchangegroup = sh -c "python \"$TESTDIR/printenv.py\" pretxnchangegroup"
71 > changegroup = sh -c "python \"$TESTDIR/printenv.py\" changegroup"
71 > changegroup = sh -c "python \"$TESTDIR/printenv.py\" changegroup"
72 > incoming = sh -c "python \"$TESTDIR/printenv.py\" incoming"
72 > incoming = sh -c "python \"$TESTDIR/printenv.py\" incoming"
73 > EOF
73 > EOF
74
74
75 Pull the new commits in the clone
75 Pull the new commits in the clone
76
76
77 $ hg pull
77 $ hg pull
78 pulling from $TESTTMP/repo (glob)
78 pulling from $TESTTMP/repo (glob)
79 searching for changes
79 searching for changes
80 remote: changegroup1
80 remote: changegroup1
81 adding changesets
81 adding changesets
82 adding manifests
82 adding manifests
83 adding file changes
83 adding file changes
84 added 1 changesets with 1 changes to 1 files
84 added 1 changesets with 1 changes to 1 files
85 pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=bundle2 HG_URL=bundle2
85 pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
86 remote: changegroup2
86 remote: changegroup2
87 adding changesets
87 adding changesets
88 adding manifests
88 adding manifests
89 adding file changes
89 adding file changes
90 added 1 changesets with 1 changes to 1 files
90 added 1 changesets with 1 changes to 1 files
91 pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
91 pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
92 changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=bundle2 HG_URL=bundle2
92 changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
93 incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=bundle2 HG_URL=bundle2
93 incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
94 changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
94 changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
95 incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
95 incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
96 pullop.cgresult is 1
96 pullop.cgresult is 1
97 (run 'hg update' to get a working copy)
97 (run 'hg update' to get a working copy)
98 $ hg update
98 $ hg update
99 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
99 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
100 $ hg log -G
100 $ hg log -G
101 @ 2:f838bfaca5c7 public test C
101 @ 2:f838bfaca5c7 public test C
102 |
102 |
103 o 1:27547f69f254 public test B
103 o 1:27547f69f254 public test B
104 |
104 |
105 o 0:4a2df7238c3b public test A
105 o 0:4a2df7238c3b public test A
106
106
107 Add more changesets with multiple heads to the original repository
107 Add more changesets with multiple heads to the original repository
108
108
109 $ cd ../repo
109 $ cd ../repo
110 $ echo D > D
110 $ echo D > D
111 $ hg commit -A -m D -q
111 $ hg commit -A -m D -q
112 $ hg up -r 1
112 $ hg up -r 1
113 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
113 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
114 $ echo E > E
114 $ echo E > E
115 $ hg commit -A -m E -q
115 $ hg commit -A -m E -q
116 $ echo F > F
116 $ echo F > F
117 $ hg commit -A -m F -q
117 $ hg commit -A -m F -q
118 $ hg up -r 1
118 $ hg up -r 1
119 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
119 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
120 $ echo G > G
120 $ echo G > G
121 $ hg commit -A -m G -q
121 $ hg commit -A -m G -q
122 $ hg up -r 3
122 $ hg up -r 3
123 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
123 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
124 $ echo H > H
124 $ echo H > H
125 $ hg commit -A -m H -q
125 $ hg commit -A -m H -q
126 $ hg log -G
126 $ hg log -G
127 @ 7:5cd59d311f65 draft test H
127 @ 7:5cd59d311f65 draft test H
128 |
128 |
129 | o 6:1d14c3ce6ac0 draft test G
129 | o 6:1d14c3ce6ac0 draft test G
130 | |
130 | |
131 | | o 5:7f219660301f draft test F
131 | | o 5:7f219660301f draft test F
132 | | |
132 | | |
133 | | o 4:8a5212ebc852 draft test E
133 | | o 4:8a5212ebc852 draft test E
134 | |/
134 | |/
135 o | 3:b3325c91a4d9 draft test D
135 o | 3:b3325c91a4d9 draft test D
136 | |
136 | |
137 o | 2:f838bfaca5c7 draft test C
137 o | 2:f838bfaca5c7 draft test C
138 |/
138 |/
139 o 1:27547f69f254 draft test B
139 o 1:27547f69f254 draft test B
140 |
140 |
141 o 0:4a2df7238c3b draft test A
141 o 0:4a2df7238c3b draft test A
142
142
143 New heads are reported during transfer and properly accounted for in
143 New heads are reported during transfer and properly accounted for in
144 pullop.cgresult
144 pullop.cgresult
145
145
146 $ cd ../clone
146 $ cd ../clone
147 $ hg pull
147 $ hg pull
148 pulling from $TESTTMP/repo (glob)
148 pulling from $TESTTMP/repo (glob)
149 searching for changes
149 searching for changes
150 remote: changegroup1
150 remote: changegroup1
151 adding changesets
151 adding changesets
152 adding manifests
152 adding manifests
153 adding file changes
153 adding file changes
154 added 2 changesets with 2 changes to 2 files (+1 heads)
154 added 2 changesets with 2 changes to 2 files (+1 heads)
155 pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_PENDING=$TESTTMP/clone HG_SOURCE=bundle2 HG_URL=bundle2
155 pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
156 remote: changegroup2
156 remote: changegroup2
157 adding changesets
157 adding changesets
158 adding manifests
158 adding manifests
159 adding file changes
159 adding file changes
160 added 3 changesets with 3 changes to 3 files (+1 heads)
160 added 3 changesets with 3 changes to 3 files (+1 heads)
161 pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
161 pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
162 changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=bundle2 HG_URL=bundle2
162 changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
163 incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=bundle2 HG_URL=bundle2
163 incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
164 incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=bundle2 HG_URL=bundle2
164 incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
165 changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
165 changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
166 incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
166 incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
167 incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
167 incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
168 incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
168 incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
169 pullop.cgresult is 3
169 pullop.cgresult is 3
170 (run 'hg heads' to see heads, 'hg merge' to merge)
170 (run 'hg heads' to see heads, 'hg merge' to merge)
171 $ hg log -G
171 $ hg log -G
172 o 7:5cd59d311f65 public test H
172 o 7:5cd59d311f65 public test H
173 |
173 |
174 | o 6:1d14c3ce6ac0 public test G
174 | o 6:1d14c3ce6ac0 public test G
175 | |
175 | |
176 | | o 5:7f219660301f public test F
176 | | o 5:7f219660301f public test F
177 | | |
177 | | |
178 | | o 4:8a5212ebc852 public test E
178 | | o 4:8a5212ebc852 public test E
179 | |/
179 | |/
180 o | 3:b3325c91a4d9 public test D
180 o | 3:b3325c91a4d9 public test D
181 | |
181 | |
182 @ | 2:f838bfaca5c7 public test C
182 @ | 2:f838bfaca5c7 public test C
183 |/
183 |/
184 o 1:27547f69f254 public test B
184 o 1:27547f69f254 public test B
185 |
185 |
186 o 0:4a2df7238c3b public test A
186 o 0:4a2df7238c3b public test A
187
187
188 Removing a head from the original repository by merging it
188 Removing a head from the original repository by merging it
189
189
190 $ cd ../repo
190 $ cd ../repo
191 $ hg merge -r 6 -q
191 $ hg merge -r 6 -q
192 $ hg commit -m Merge
192 $ hg commit -m Merge
193 $ echo I > I
193 $ echo I > I
194 $ hg commit -A -m H -q
194 $ hg commit -A -m H -q
195 $ hg log -G
195 $ hg log -G
196 @ 9:9d18e5bd9ab0 draft test H
196 @ 9:9d18e5bd9ab0 draft test H
197 |
197 |
198 o 8:71bd7b46de72 draft test Merge
198 o 8:71bd7b46de72 draft test Merge
199 |\
199 |\
200 | o 7:5cd59d311f65 draft test H
200 | o 7:5cd59d311f65 draft test H
201 | |
201 | |
202 o | 6:1d14c3ce6ac0 draft test G
202 o | 6:1d14c3ce6ac0 draft test G
203 | |
203 | |
204 | | o 5:7f219660301f draft test F
204 | | o 5:7f219660301f draft test F
205 | | |
205 | | |
206 +---o 4:8a5212ebc852 draft test E
206 +---o 4:8a5212ebc852 draft test E
207 | |
207 | |
208 | o 3:b3325c91a4d9 draft test D
208 | o 3:b3325c91a4d9 draft test D
209 | |
209 | |
210 | o 2:f838bfaca5c7 draft test C
210 | o 2:f838bfaca5c7 draft test C
211 |/
211 |/
212 o 1:27547f69f254 draft test B
212 o 1:27547f69f254 draft test B
213 |
213 |
214 o 0:4a2df7238c3b draft test A
214 o 0:4a2df7238c3b draft test A
215
215
216 Removed heads are reported during transfer and properly accounted for in
216 Removed heads are reported during transfer and properly accounted for in
217 pullop.cgresult
217 pullop.cgresult
218
218
219 $ cd ../clone
219 $ cd ../clone
220 $ hg pull
220 $ hg pull
221 pulling from $TESTTMP/repo (glob)
221 pulling from $TESTTMP/repo (glob)
222 searching for changes
222 searching for changes
223 remote: changegroup1
223 remote: changegroup1
224 adding changesets
224 adding changesets
225 adding manifests
225 adding manifests
226 adding file changes
226 adding file changes
227 added 1 changesets with 0 changes to 0 files (-1 heads)
227 added 1 changesets with 0 changes to 0 files (-1 heads)
228 pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=bundle2 HG_URL=bundle2
228 pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
229 remote: changegroup2
229 remote: changegroup2
230 adding changesets
230 adding changesets
231 adding manifests
231 adding manifests
232 adding file changes
232 adding file changes
233 added 1 changesets with 1 changes to 1 files
233 added 1 changesets with 1 changes to 1 files
234 pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
234 pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
235 changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=bundle2 HG_URL=bundle2
235 changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
236 incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=bundle2 HG_URL=bundle2
236 incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
237 changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
237 changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
238 incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=bundle2 HG_URL=bundle2
238 incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
239 pullop.cgresult is -2
239 pullop.cgresult is -2
240 (run 'hg update' to get a working copy)
240 (run 'hg update' to get a working copy)
241 $ hg log -G
241 $ hg log -G
242 o 9:9d18e5bd9ab0 public test H
242 o 9:9d18e5bd9ab0 public test H
243 |
243 |
244 o 8:71bd7b46de72 public test Merge
244 o 8:71bd7b46de72 public test Merge
245 |\
245 |\
246 | o 7:5cd59d311f65 public test H
246 | o 7:5cd59d311f65 public test H
247 | |
247 | |
248 o | 6:1d14c3ce6ac0 public test G
248 o | 6:1d14c3ce6ac0 public test G
249 | |
249 | |
250 | | o 5:7f219660301f public test F
250 | | o 5:7f219660301f public test F
251 | | |
251 | | |
252 +---o 4:8a5212ebc852 public test E
252 +---o 4:8a5212ebc852 public test E
253 | |
253 | |
254 | o 3:b3325c91a4d9 public test D
254 | o 3:b3325c91a4d9 public test D
255 | |
255 | |
256 | @ 2:f838bfaca5c7 public test C
256 | @ 2:f838bfaca5c7 public test C
257 |/
257 |/
258 o 1:27547f69f254 public test B
258 o 1:27547f69f254 public test B
259 |
259 |
260 o 0:4a2df7238c3b public test A
260 o 0:4a2df7238c3b public test A
261
261
General Comments 0
You need to be logged in to leave comments. Login now