# Scrape residue from a Mercurial changeset diff view; retained for provenance:
# changeset r48286:f03e9d30 (branch: default, author: av6)
# exchange: use "served" repo filter to guess what the server will publish
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import weakref
11 import weakref
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullrev,
16 nullrev,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 bundlecaches,
21 bundlecaches,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 exchangev2,
25 exchangev2,
26 lock as lockmod,
26 lock as lockmod,
27 logexchange,
27 logexchange,
28 narrowspec,
28 narrowspec,
29 obsolete,
29 obsolete,
30 obsutil,
30 obsutil,
31 phases,
31 phases,
32 pushkey,
32 pushkey,
33 pycompat,
33 pycompat,
34 requirements,
34 requirements,
35 scmutil,
35 scmutil,
36 streamclone,
36 streamclone,
37 url as urlmod,
37 url as urlmod,
38 util,
38 util,
39 wireprototypes,
39 wireprototypes,
40 )
40 )
41 from .utils import (
41 from .utils import (
42 hashutil,
42 hashutil,
43 stringutil,
43 stringutil,
44 urlutil,
44 urlutil,
45 )
45 )
46 from .interfaces import repository
46 from .interfaces import repository
47
47
48 urlerr = util.urlerr
48 urlerr = util.urlerr
49 urlreq = util.urlreq
49 urlreq = util.urlreq
50
50
51 _NARROWACL_SECTION = b'narrowacl'
51 _NARROWACL_SECTION = b'narrowacl'
52
52
53
53
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte magic on *fh* and return the matching bundle unpacker.

    *fname* is used for error reporting only (``b"stream"`` when absent);
    when *vfs* is given, *fname* is resolved relative to it.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if fname:
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = b"stream"
        if header.startswith(b'\0') and not header.startswith(b'HG'):
            # headerless changegroup stream: push the consumed bytes back and
            # treat it as an uncompressed HG10 bundle
            fh = changegroup.headerlessfixup(fh, header)
            header = b"HG10"
            alg = b'UN'

    magic = header[0:2]
    version = header[2:4]

    if magic != b'HG':
        raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
    if version == b'10':
        if alg is None:
            # compression marker follows the magic for HG10 bundles
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith(b'2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == b'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_(b'%s: unknown bundle version %s') % (fname, version))
83
83
84
84
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """

    def speccompression(alg):
        # Translate an internal compression engine name into the name used
        # in bundlespec strings; None when the engine is not registered.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == b'_truncatedBZ':
            alg = b'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
        return b'%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if b'Compression' in b.params:
            alg = b.params[b'Compression']
            comp = speccompression(alg)
            if not comp:
                # report the algorithm we failed to translate, not the
                # (None) translation result
                raise error.Abort(
                    _(b'unknown compression algorithm: %s') % alg
                )
        else:
            comp = b'none'

        version = None
        for part in b.iterparts():
            if part.type == b'changegroup':
                version = part.params[b'version']
                if version in (b'01', b'02'):
                    version = b'v2'
                else:
                    raise error.Abort(
                        _(
                            b'changegroup version %s does not have '
                            b'a known bundlespec'
                        )
                        % version,
                        hint=_(b'try upgrading your Mercurial client'),
                    )
            elif part.type == b'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                requirements = urlreq.unquote(part.params[b'requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return b'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(
                _(b'could not identify changegroup version in bundle')
            )

        return b'%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return b'none-packed1;%s' % formatted
    else:
        raise error.Abort(_(b'unknown bundle type: %s') % b)
151
151
152
152
def _computeoutgoing(repo, heads, common):
    """Compute which revisions are outgoing given a set of common nodes and a
    set of heads.

    Kept as a standalone function so extensions can reuse the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        common = [repo.nullid]
    else:
        # drop nodes the local changelog does not know about
        common = [node for node in common if cl.hasnode(node)]
    return discovery.outgoing(repo, common, heads or cl.heads())
171
171
172
172
def _checkpublish(pushop):
    """Warn, confirm, or abort when pushing would publish draft changesets.

    Driven by the ``experimental.auto-publish`` config value (``warn``,
    ``confirm`` or ``abort``); a no-op when the push is explicitly publishing
    or when the remote does not advertise itself as a publishing repository.
    """
    repo = pushop.repo
    ui = repo.ui
    behavior = ui.config(b'experimental', b'auto-publish')
    if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
        return
    # ask the remote for its phase state; publishing servers report so here
    remotephases = listkeys(pushop.remote, b'phases')
    if not remotephases.get(b'publishing', False):
        return

    if pushop.revs is None:
        # pushing everything the server can see: the "served" filter hides
        # secret changesets, which would not be published
        published = repo.filtered(b'served').revs(b'not public()')
    else:
        published = repo.revs(b'::%ln - public()', pushop.revs)
        # we want to use pushop.revs in the revset even if they themselves are
        # secret, but we don't want to have anything that the server won't see
        # in the result of this expression
        published &= repo.filtered(b'served')
    if published:
        if behavior == b'warn':
            ui.warn(
                _(b'%i changesets about to be published\n') % len(published)
            )
        elif behavior == b'confirm':
            if ui.promptchoice(
                _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
                % len(published)
            ):
                raise error.CanceledError(_(b'user quit'))
        elif behavior == b'abort':
            msg = _(b'push would publish %i changesets') % len(published)
            hint = _(
                b"use --publish or adjust 'experimental.auto-publish'"
                b" config"
            )
            raise error.Abort(msg, hint=hint)
205
209
206
210
def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    # developer config: devel.legacy.exchange
    #
    # This setting lets developers pin the bundle version used during
    # exchange, which is especially handy in tests. It holds a list of
    # acceptable bundle versions; the highest listed version should be used.
    versions = op.repo.ui.configlist(b'devel', b'legacy.exchange')
    wantbundle1 = b'bundle1' in versions and b'bundle2' not in versions
    if wantbundle1:
        return True
    # otherwise bundle1 is only forced when the remote lacks bundle2 support
    return not op.remote.capable(b'bundle2')
221
225
222
226
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        force=False,
        revs=None,
        newbranch=False,
        bookmarks=(),
        publish=False,
        pushvars=None,
    ):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars
        # publish pushed changesets
        self.publish = publish

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.ancestorsof

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::ancestorsof and ::commonheads)
        # (ancestorsof is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (ancestorsof and ::commonheads)
        #              + (commonheads and ::ancestorsof))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::ancestorsof) - commonheads)
        #
        # We can pick:
        # * ancestorsof part of common (::commonheads)
        common = self.outgoing.common
        rev = self.repo.changelog.index.rev
        cheads = [node for node in self.revs if rev(node) in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set(
            b'%ln and parents(roots(%ln))',
            self.outgoing.commonheads,
            self.outgoing.missing,
        )
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads
353
357
354
358
# mapping of messages used when pushing a bookmark: for each action, a
# (success, failure) pair of translatable format strings taking the
# bookmark name
bookmsgmap = {
    b'update': (
        _(b"updating bookmark %s\n"),
        _(b'updating bookmark %s failed\n'),
    ),
    b'export': (
        _(b"exporting bookmark %s\n"),
        _(b'exporting bookmark %s failed\n'),
    ),
    b'delete': (
        _(b"deleting remote bookmark %s\n"),
        _(b'deleting remote bookmark %s failed\n'),
    ),
}
370
374
371
375
def push(
    repo,
    remote,
    force=False,
    revs=None,
    newbranch=False,
    bookmarks=(),
    publish=False,
    opargs=None,
):
    """Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    """
    if opargs is None:
        opargs = {}
    pushop = pushoperation(
        repo,
        remote,
        force,
        revs,
        newbranch,
        bookmarks,
        publish,
        **pycompat.strkwargs(opargs)
    )
    if pushop.remote.local():
        # local destination: refuse early when it cannot host our repo format
        missing = (
            set(pushop.repo.requirements) - pushop.remote.local().supported
        )
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_(b"destination does not support push"))

    if not pushop.remote.capable(b'unbundle'):
        raise error.Abort(
            _(
                b'cannot push: destination does not support the '
                b'unbundle wire protocol command'
            )
        )
    for category in sorted(bundle2.read_remote_wanted_sidedata(pushop.remote)):
        # Check that a computer is registered for that category for at least
        # one revlog kind.
        for kind, computers in repo._sidedata_computers.items():
            if computers.get(category):
                break
        else:
            raise error.Abort(
                _(
                    b'cannot push: required sidedata category not supported'
                    b" by this client: '%s'"
                )
                % pycompat.bytestr(category)
            )
    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
        if (
            (not _forcebundle1(pushop))
            and maypushback
            and not bookmod.bookmarksinstore(repo)
        ):
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(
            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
            err
        )
        pushop.ui.debug(msg)

    # locks/transaction may be None here (lock failure above); the
    # nullcontextmanager keeps the with-nesting uniform either way
    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not pushop.force:
                    _checksubrepostate(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool(b'experimental', b'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
482
486
483
487
# list of steps to perform discovery before push, in execution order;
# populated by the pushdiscovery() decorator below
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
491
495
492
496
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""

    def register(func):
        # a step name may only be declared once; extensions that want to
        # wrap a step mutate pushdiscoverymapping instead
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func

    return register
510
514
511
515
def _pushdiscovery(pushop):
    """Run all discovery steps"""
    # steps run in registration order; each one mutates pushop in place
    for stepname in pushdiscoveryorder:
        pushdiscoverymapping[stepname](pushop)
517
521
518
522
def _checksubrepostate(pushop):
    """Ensure all outgoing referenced subrepo revisions are present locally"""
    repo = pushop.repo
    for node in pushop.outgoing.missing:
        ctx = repo[node]
        # only changesets that carry a subrepo config and touch the subrepo
        # state file need their subrepo revisions verified
        if b'.hgsub' not in ctx.manifest():
            continue
        if b'.hgsubstate' not in ctx.files():
            continue
        for subpath in sorted(ctx.substate):
            ctx.sub(subpath).verify(onpush=True)
528
532
529
533
@pushdiscovery(b'changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    repo = pushop.repo
    remote = pushop.remote
    if pushop.revs:
        commoninc = discovery.findcommonincoming(
            repo,
            remote,
            force=pushop.force,
            ancestorsof=pushop.revs,
        )
    else:
        commoninc = discovery.findcommonincoming(
            repo, remote, force=pushop.force
        )
    common, inc, remoteheads = commoninc
    # reuse the incoming discovery result when computing the outgoing set
    pushop.outgoing = discovery.findcommonoutgoing(
        repo,
        remote,
        onlyheads=pushop.revs,
        commoninc=commoninc,
        force=pushop.force,
    )
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
555
559
556
560
@pushdiscovery(b'phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, b'phases')

    subrepopush = pushop.ui.configbool(b'ui', b'_usedassubrepo')
    if (
        subrepopush
        and remotephases  # server supports phases
        and not outgoing.missing  # no changesets to be pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(
        pushop.repo, pushop.fallbackheads, remotephases
    )
    droots = pushop.remotephases.draftroots

    # restrict to public heads when the remote is not publishing
    extracond = b'' if pushop.remotephases.publishing else b' and public()'
    revset = b'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if pushop.publish and not pushop.remotephases.publishing:
        future = list(
            unfi.set(
                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
            )
        )
    elif outgoing.missing:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = [
            c.node()
            for c in unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
        ]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    else:
        future = fallback
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
618
622
619
623
@pushdiscovery(b'obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return

    if not repo.obsstore:
        return

    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
        return

    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
636
640
637
641
@pushdiscovery(b'bookmarks')
def _pushdiscoverybookmarks(pushop):
    repo = pushop.repo.unfiltered()
    pushop.ui.debug(b"checking for updated bookmarks\n")
    # limit the comparison to the pushed set when revisions were given
    ancestors = ()
    if pushop.revs:
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(
        listkeys(pushop.remote, b'bookmarks')
    )

    # bookmarks explicitly requested on the command line, name-expanded
    explicit = {
        repo._bookmarks.expandname(mark) for mark in pushop.bookmarks
    }

    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
657
661
658
662
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repo

    Exists to help extensions alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo
    outbookmarks = pushop.outbookmarks

    # bookmarks that moved forward locally
    for name, scid, dcid in advsrc:
        if name in explicit:
            explicit.remove(name)
        if not pushed or repo[scid].rev() in pushed:
            outbookmarks.append((name, dcid, scid))
    # search added bookmark
    for name, scid, dcid in addsrc:
        if name in explicit:
            explicit.remove(name)
        if bookmod.isdivergent(name):
            pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % name)
            pushop.bkresult = 2
        else:
            outbookmarks.append((name, b'', scid))
    # search for overwritten bookmark
    for name, scid, dcid in [*advdst, *diverge, *differ]:
        if name in explicit:
            explicit.remove(name)
        outbookmarks.append((name, dcid, scid))
    # search for bookmark to delete
    for name, scid, dcid in adddst:
        if name in explicit:
            explicit.remove(name)
        # treat as "deleted locally"
        outbookmarks.append((name, dcid, b''))
    # identical bookmarks shouldn't get reported
    for name, scid, dcid in same:
        if name in explicit:
            explicit.remove(name)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(
            _(
                b'bookmark %s does not exist on the local '
                b'or remote repository!\n'
            )
            % explicit[0]
        )
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
711
715
712
716
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set; return False when there is nothing to push.

    For non-forced pushes, aborts when the outgoing set contains obsolete or
    unstable changesets and runs the standard head checks.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if pushop.force:
        return True
    # if repo.obsstore == False --> no obsolete
    # then, save the iteration
    if unfi.obsstore:
        msg_obsolete = _(b"push includes obsolete changeset: %s!")
        instability_msgs = {
            b"orphan": _(b"push includes orphan changeset: %s!"),
            b"phase-divergent": _(
                b"push includes phase-divergent changeset: %s!"
            ),
            b"content-divergent": _(
                b"push includes content-divergent changeset: %s!"
            ),
        }
        # If we are to push if there is at least one obsolete or unstable
        # changeset in missing, at least one of the missing heads will be
        # obsolete or unstable. So checking heads only is ok.
        for node in outgoing.ancestorsof:
            ctx = unfi[node]
            if ctx.obsolete():
                raise error.Abort(msg_obsolete % ctx)
            if ctx.isunstable():
                # TODO print more than one instability in the abort
                # message
                raise error.Abort(
                    instability_msgs[ctx.instabilities()[0]] % ctx
                )

    discovery.checkheads(pushop)
    return True
749
753
750
754
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
758
762
759
763
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""

    def register(func):
        # refuse to silently replace an already-registered step
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is not None:
            b2partsgenorder.insert(idx, stepname)
        else:
            b2partsgenorder.append(stepname)
        return func

    return register
780
784
781
785
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # 'force' skips the push-race check entirely, and an empty outgoing
    # set leaves nothing to check.
    if pushop.force or not pushop.outgoing.ancestorsof:
        return
    checkheadscaps = bundler.capabilities.get(b'checkheads', ())
    if b'related' not in checkheadscaps or pushop.pushbranchmap is None:
        # the remote cannot check unrelated heads, or we know nothing
        # about its branchmap: fall back to the whole-heads check
        bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
        return
    affected = set()
    for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
        remoteheads, newheads, unsyncedheads, discardedheads = heads
        if remoteheads is None:
            continue
        remote = set(remoteheads)
        affected |= set(discardedheads) & remote
        affected |= remote - set(newheads)
    if affected:
        bundler.newpart(
            b'check:updated-heads', data=iter(sorted(affected))
        )
807
811
808
812
809 def _pushing(pushop):
813 def _pushing(pushop):
810 """return True if we are pushing anything"""
814 """return True if we are pushing anything"""
811 return bool(
815 return bool(
812 pushop.outgoing.missing
816 pushop.outgoing.missing
813 or pushop.outdatedphases
817 or pushop.outdatedphases
814 or pushop.outobsmarkers
818 or pushop.outobsmarkers
815 or pushop.outbookmarks
819 or pushop.outbookmarks
816 )
820 )
817
821
818
822
@b2partsgenerator(b'check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if pushop.force or not _pushing(pushop):
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not pushop.outbookmarks or b'bookmarks' not in b2caps:
        return
    # record the value each moved bookmark is expected to have remotely
    expected = [(book, old) for book, old, new in pushop.outbookmarks]
    checkdata = bookmod.binaryencode(pushop.repo, expected)
    bundler.newpart(b'check:bookmarks', data=checkdata)
833
837
834
838
@b2partsgenerator(b'check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if pushop.force or not _pushing(pushop):
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
    if pushop.remotephases is None or not hasphaseheads:
        return
    # check that the remote phase has not changed
    checks = {p: [] for p in phases.allphases}
    checks[phases.public].extend(pushop.remotephases.publicheads)
    checks[phases.draft].extend(pushop.remotephases.draftroots)
    if any(pycompat.itervalues(checks)):
        for phase in checks:
            checks[phase].sort()
        bundler.newpart(b'check:phases', data=phases.binaryencode(checks))
852
856
853
857
@b2partsgenerator(b'changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        supported = changegroup.supportedoutgoingversions(pushop.repo)
        cgversions = [v for v in cgversions if v in supported]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)

    remote_sidedata = bundle2.read_remote_wanted_sidedata(pushop.remote)
    cgstream = changegroup.makestream(
        pushop.repo,
        pushop.outgoing,
        version,
        b'push',
        bundlecaps=b2caps,
        remote_sidedata=remote_sidedata,
    )
    cgpart = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam(b'version', version)
    if scmutil.istreemanifest(pushop.repo):
        cgpart.addparam(b'treemanifest', b'1')
    if repository.REPO_FEATURE_SIDE_DATA in pushop.repo.features:
        cgpart.addparam(b'exp-sidedata', b'1')

    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        replies = op.records.getreplies(cgpart.id)
        assert len(replies[b'changegroup']) == 1
        pushop.cgresult = replies[b'changegroup'][0][b'return']

    return handlereply
907
911
908
912
@b2partsgenerator(b'phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if b'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    # the binary phase-heads part is preferred unless the user forced the
    # legacy pushkey-based exchange via devel.legacy.exchange
    uselegacy = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
    if not uselegacy and b'heads' in b2caps.get(b'phases', ()):
        return _pushb2phaseheads(pushop, bundler)
    if b'pushkey' in b2caps:
        return _pushb2phasespushkey(pushop, bundler)
925
929
926
930
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part

    Encodes the heads that must become public on the remote into a
    ``phase-heads`` part.  Only emits a part when there is actually
    something to publish.
    """
    pushop.stepsdone.add(b'phases')
    if pushop.outdatedphases:
        updates = {p: [] for p in phases.allphases}
        # use the named constant instead of the raw phase number 0, matching
        # how _pushb2checkphases addresses the public bucket
        updates[phases.public].extend(
            h.node() for h in pushop.outdatedphases
        )
        phasedata = phases.binaryencode(updates)
        bundler.newpart(b'phase-heads', data=phasedata)
935
939
936
940
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add(b'phases')
    pending = []  # list of (part id, node being published)

    def handlefailure(pushop, exc):
        failedid = int(exc.partid)
        for partid, node in pending:
            if partid == failedid:
                raise error.Abort(_(b'updating %s to public failed') % node)

    enc = pushkey.encode
    for head in pushop.outdatedphases:
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'phases'))
        part.addparam(b'key', enc(head.hex()))
        part.addparam(b'old', enc(b'%d' % phases.draft))
        part.addparam(b'new', enc(b'%d' % phases.public))
        pending.append((part.id, head))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in pending:
            results = op.records.getreplies(partid)[b'pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(
                    _(b'server ignored update of %s to public!\n') % node
                )
            elif not int(results[0][b'return']):
                pushop.ui.warn(
                    _(b'updating %s to public failed!\n') % node
                )

    return handlereply
972
976
973
977
@b2partsgenerator(b'obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """insert an obsolescence-markers part when the remote can take one"""
    if b'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no obsmarkers format shared with the remote
        return
    pushop.stepsdone.add(b'obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(
            bundler, obsutil.sortedmarkers(pushop.outobsmarkers)
        )
985
989
986
990
@b2partsgenerator(b'bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if b'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    # prefer the dedicated bookmarks part unless the user opted into the
    # legacy pushkey-based exchange
    legacybooks = b'bookmarks' in pushop.repo.ui.configlist(
        b'devel', b'legacy.exchange'
    )
    if not legacybooks and b'bookmarks' in b2caps:
        return _pushb2bookmarkspart(pushop, bundler)
    if b'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
1001
1005
1002
1006
1003 def _bmaction(old, new):
1007 def _bmaction(old, new):
1004 """small utility for bookmark pushing"""
1008 """small utility for bookmark pushing"""
1005 if not old:
1009 if not old:
1006 return b'export'
1010 return b'export'
1007 elif not new:
1011 elif not new:
1008 return b'delete'
1012 return b'delete'
1009 return b'update'
1013 return b'update'
1010
1014
1011
1015
def _abortonsecretctx(pushop, node, b):
    """abort if a given bookmark points to a secret changeset"""
    if not node:
        return
    if pushop.repo[node].phase() == phases.secret:
        msg = _(b'cannot push bookmark %s as it points to a secret changeset')
        raise error.Abort(msg % b)
1018
1022
1019
1023
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmark updates through a dedicated bundle2 part"""
    pushop.stepsdone.add(b'bookmarks')
    if not pushop.outbookmarks:
        return

    actions = []
    updates = []
    for book, old, new in pushop.outbookmarks:
        _abortonsecretctx(pushop, new, book)
        updates.append((book, new))
        actions.append((book, _bmaction(old, new)))
    bundler.newpart(
        b'bookmarks', data=bookmod.binaryencode(pushop.repo, updates)
    )

    def handlereply(op):
        ui = pushop.ui
        # if success
        for book, action in actions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
1041
1045
1042
1046
1043 def _pushb2bookmarkspushkey(pushop, bundler):
1047 def _pushb2bookmarkspushkey(pushop, bundler):
1044 pushop.stepsdone.add(b'bookmarks')
1048 pushop.stepsdone.add(b'bookmarks')
1045 part2book = []
1049 part2book = []
1046 enc = pushkey.encode
1050 enc = pushkey.encode
1047
1051
1048 def handlefailure(pushop, exc):
1052 def handlefailure(pushop, exc):
1049 targetid = int(exc.partid)
1053 targetid = int(exc.partid)
1050 for partid, book, action in part2book:
1054 for partid, book, action in part2book:
1051 if partid == targetid:
1055 if partid == targetid:
1052 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1056 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1053 # we should not be called for part we did not generated
1057 # we should not be called for part we did not generated
1054 assert False
1058 assert False
1055
1059
1056 for book, old, new in pushop.outbookmarks:
1060 for book, old, new in pushop.outbookmarks:
1057 _abortonsecretctx(pushop, new, book)
1061 _abortonsecretctx(pushop, new, book)
1058 part = bundler.newpart(b'pushkey')
1062 part = bundler.newpart(b'pushkey')
1059 part.addparam(b'namespace', enc(b'bookmarks'))
1063 part.addparam(b'namespace', enc(b'bookmarks'))
1060 part.addparam(b'key', enc(book))
1064 part.addparam(b'key', enc(book))
1061 part.addparam(b'old', enc(hex(old)))
1065 part.addparam(b'old', enc(hex(old)))
1062 part.addparam(b'new', enc(hex(new)))
1066 part.addparam(b'new', enc(hex(new)))
1063 action = b'update'
1067 action = b'update'
1064 if not old:
1068 if not old:
1065 action = b'export'
1069 action = b'export'
1066 elif not new:
1070 elif not new:
1067 action = b'delete'
1071 action = b'delete'
1068 part2book.append((part.id, book, action))
1072 part2book.append((part.id, book, action))
1069 pushop.pkfailcb[part.id] = handlefailure
1073 pushop.pkfailcb[part.id] = handlefailure
1070
1074
1071 def handlereply(op):
1075 def handlereply(op):
1072 ui = pushop.ui
1076 ui = pushop.ui
1073 for partid, book, action in part2book:
1077 for partid, book, action in part2book:
1074 partrep = op.records.getreplies(partid)
1078 partrep = op.records.getreplies(partid)
1075 results = partrep[b'pushkey']
1079 results = partrep[b'pushkey']
1076 assert len(results) <= 1
1080 assert len(results) <= 1
1077 if not results:
1081 if not results:
1078 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1082 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1079 else:
1083 else:
1080 ret = int(results[0][b'return'])
1084 ret = int(results[0][b'return'])
1081 if ret:
1085 if ret:
1082 ui.status(bookmsgmap[action][0] % book)
1086 ui.status(bookmsgmap[action][0] % book)
1083 else:
1087 else:
1084 ui.warn(bookmsgmap[action][1] % book)
1088 ui.warn(bookmsgmap[action][1] % book)
1085 if pushop.bkresult is not None:
1089 if pushop.bkresult is not None:
1086 pushop.bkresult = 1
1090 pushop.bkresult = 1
1087
1091
1088 return handlereply
1092 return handlereply
1089
1093
1090
1094
1091 @b2partsgenerator(b'pushvars', idx=0)
1095 @b2partsgenerator(b'pushvars', idx=0)
1092 def _getbundlesendvars(pushop, bundler):
1096 def _getbundlesendvars(pushop, bundler):
1093 '''send shellvars via bundle2'''
1097 '''send shellvars via bundle2'''
1094 pushvars = pushop.pushvars
1098 pushvars = pushop.pushvars
1095 if pushvars:
1099 if pushvars:
1096 shellvars = {}
1100 shellvars = {}
1097 for raw in pushvars:
1101 for raw in pushvars:
1098 if b'=' not in raw:
1102 if b'=' not in raw:
1099 msg = (
1103 msg = (
1100 b"unable to parse variable '%s', should follow "
1104 b"unable to parse variable '%s', should follow "
1101 b"'KEY=VALUE' or 'KEY=' format"
1105 b"'KEY=VALUE' or 'KEY=' format"
1102 )
1106 )
1103 raise error.Abort(msg % raw)
1107 raise error.Abort(msg % raw)
1104 k, v = raw.split(b'=', 1)
1108 k, v = raw.split(b'=', 1)
1105 shellvars[k] = v
1109 shellvars[k] = v
1106
1110
1107 part = bundler.newpart(b'pushvars')
1111 part = bundler.newpart(b'pushvars')
1108
1112
1109 for key, value in pycompat.iteritems(shellvars):
1113 for key, value in pycompat.iteritems(shellvars):
1110 part.addparam(key, value, mandatory=False)
1114 part.addparam(key, value, mandatory=False)
1111
1115
1112
1116
1113 def _pushbundle2(pushop):
1117 def _pushbundle2(pushop):
1114 """push data to the remote using bundle2
1118 """push data to the remote using bundle2
1115
1119
1116 The only currently supported type of data is changegroup but this will
1120 The only currently supported type of data is changegroup but this will
1117 evolve in the future."""
1121 evolve in the future."""
1118 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1122 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1119 pushback = pushop.trmanager and pushop.ui.configbool(
1123 pushback = pushop.trmanager and pushop.ui.configbool(
1120 b'experimental', b'bundle2.pushback'
1124 b'experimental', b'bundle2.pushback'
1121 )
1125 )
1122
1126
1123 # create reply capability
1127 # create reply capability
1124 capsblob = bundle2.encodecaps(
1128 capsblob = bundle2.encodecaps(
1125 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1129 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1126 )
1130 )
1127 bundler.newpart(b'replycaps', data=capsblob)
1131 bundler.newpart(b'replycaps', data=capsblob)
1128 replyhandlers = []
1132 replyhandlers = []
1129 for partgenname in b2partsgenorder:
1133 for partgenname in b2partsgenorder:
1130 partgen = b2partsgenmapping[partgenname]
1134 partgen = b2partsgenmapping[partgenname]
1131 ret = partgen(pushop, bundler)
1135 ret = partgen(pushop, bundler)
1132 if callable(ret):
1136 if callable(ret):
1133 replyhandlers.append(ret)
1137 replyhandlers.append(ret)
1134 # do not push if nothing to push
1138 # do not push if nothing to push
1135 if bundler.nbparts <= 1:
1139 if bundler.nbparts <= 1:
1136 return
1140 return
1137 stream = util.chunkbuffer(bundler.getchunks())
1141 stream = util.chunkbuffer(bundler.getchunks())
1138 try:
1142 try:
1139 try:
1143 try:
1140 with pushop.remote.commandexecutor() as e:
1144 with pushop.remote.commandexecutor() as e:
1141 reply = e.callcommand(
1145 reply = e.callcommand(
1142 b'unbundle',
1146 b'unbundle',
1143 {
1147 {
1144 b'bundle': stream,
1148 b'bundle': stream,
1145 b'heads': [b'force'],
1149 b'heads': [b'force'],
1146 b'url': pushop.remote.url(),
1150 b'url': pushop.remote.url(),
1147 },
1151 },
1148 ).result()
1152 ).result()
1149 except error.BundleValueError as exc:
1153 except error.BundleValueError as exc:
1150 raise error.RemoteError(_(b'missing support for %s') % exc)
1154 raise error.RemoteError(_(b'missing support for %s') % exc)
1151 try:
1155 try:
1152 trgetter = None
1156 trgetter = None
1153 if pushback:
1157 if pushback:
1154 trgetter = pushop.trmanager.transaction
1158 trgetter = pushop.trmanager.transaction
1155 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1159 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1156 except error.BundleValueError as exc:
1160 except error.BundleValueError as exc:
1157 raise error.RemoteError(_(b'missing support for %s') % exc)
1161 raise error.RemoteError(_(b'missing support for %s') % exc)
1158 except bundle2.AbortFromPart as exc:
1162 except bundle2.AbortFromPart as exc:
1159 pushop.ui.error(_(b'remote: %s\n') % exc)
1163 pushop.ui.error(_(b'remote: %s\n') % exc)
1160 if exc.hint is not None:
1164 if exc.hint is not None:
1161 pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1165 pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1162 raise error.RemoteError(_(b'push failed on remote'))
1166 raise error.RemoteError(_(b'push failed on remote'))
1163 except error.PushkeyFailed as exc:
1167 except error.PushkeyFailed as exc:
1164 partid = int(exc.partid)
1168 partid = int(exc.partid)
1165 if partid not in pushop.pkfailcb:
1169 if partid not in pushop.pkfailcb:
1166 raise
1170 raise
1167 pushop.pkfailcb[partid](pushop, exc)
1171 pushop.pkfailcb[partid](pushop, exc)
1168 for rephand in replyhandlers:
1172 for rephand in replyhandlers:
1169 rephand(op)
1173 rephand(op)
1170
1174
1171
1175
1172 def _pushchangeset(pushop):
1176 def _pushchangeset(pushop):
1173 """Make the actual push of changeset bundle to remote repo"""
1177 """Make the actual push of changeset bundle to remote repo"""
1174 if b'changesets' in pushop.stepsdone:
1178 if b'changesets' in pushop.stepsdone:
1175 return
1179 return
1176 pushop.stepsdone.add(b'changesets')
1180 pushop.stepsdone.add(b'changesets')
1177 if not _pushcheckoutgoing(pushop):
1181 if not _pushcheckoutgoing(pushop):
1178 return
1182 return
1179
1183
1180 # Should have verified this in push().
1184 # Should have verified this in push().
1181 assert pushop.remote.capable(b'unbundle')
1185 assert pushop.remote.capable(b'unbundle')
1182
1186
1183 pushop.repo.prepushoutgoinghooks(pushop)
1187 pushop.repo.prepushoutgoinghooks(pushop)
1184 outgoing = pushop.outgoing
1188 outgoing = pushop.outgoing
1185 # TODO: get bundlecaps from remote
1189 # TODO: get bundlecaps from remote
1186 bundlecaps = None
1190 bundlecaps = None
1187 # create a changegroup from local
1191 # create a changegroup from local
1188 if pushop.revs is None and not (
1192 if pushop.revs is None and not (
1189 outgoing.excluded or pushop.repo.changelog.filteredrevs
1193 outgoing.excluded or pushop.repo.changelog.filteredrevs
1190 ):
1194 ):
1191 # push everything,
1195 # push everything,
1192 # use the fast path, no race possible on push
1196 # use the fast path, no race possible on push
1193 cg = changegroup.makechangegroup(
1197 cg = changegroup.makechangegroup(
1194 pushop.repo,
1198 pushop.repo,
1195 outgoing,
1199 outgoing,
1196 b'01',
1200 b'01',
1197 b'push',
1201 b'push',
1198 fastpath=True,
1202 fastpath=True,
1199 bundlecaps=bundlecaps,
1203 bundlecaps=bundlecaps,
1200 )
1204 )
1201 else:
1205 else:
1202 cg = changegroup.makechangegroup(
1206 cg = changegroup.makechangegroup(
1203 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1207 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1204 )
1208 )
1205
1209
1206 # apply changegroup to remote
1210 # apply changegroup to remote
1207 # local repo finds heads on server, finds out what
1211 # local repo finds heads on server, finds out what
1208 # revs it must push. once revs transferred, if server
1212 # revs it must push. once revs transferred, if server
1209 # finds it has different heads (someone else won
1213 # finds it has different heads (someone else won
1210 # commit/push race), server aborts.
1214 # commit/push race), server aborts.
1211 if pushop.force:
1215 if pushop.force:
1212 remoteheads = [b'force']
1216 remoteheads = [b'force']
1213 else:
1217 else:
1214 remoteheads = pushop.remoteheads
1218 remoteheads = pushop.remoteheads
1215 # ssh: return remote's addchangegroup()
1219 # ssh: return remote's addchangegroup()
1216 # http: return remote's addchangegroup() or 0 for error
1220 # http: return remote's addchangegroup() or 0 for error
1217 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1221 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1218
1222
1219
1223
1220 def _pushsyncphase(pushop):
1224 def _pushsyncphase(pushop):
1221 """synchronise phase information locally and remotely"""
1225 """synchronise phase information locally and remotely"""
1222 cheads = pushop.commonheads
1226 cheads = pushop.commonheads
1223 # even when we don't push, exchanging phase data is useful
1227 # even when we don't push, exchanging phase data is useful
1224 remotephases = listkeys(pushop.remote, b'phases')
1228 remotephases = listkeys(pushop.remote, b'phases')
1225 if (
1229 if (
1226 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1230 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1227 and remotephases # server supports phases
1231 and remotephases # server supports phases
1228 and pushop.cgresult is None # nothing was pushed
1232 and pushop.cgresult is None # nothing was pushed
1229 and remotephases.get(b'publishing', False)
1233 and remotephases.get(b'publishing', False)
1230 ):
1234 ):
1231 # When:
1235 # When:
1232 # - this is a subrepo push
1236 # - this is a subrepo push
1233 # - and remote support phase
1237 # - and remote support phase
1234 # - and no changeset was pushed
1238 # - and no changeset was pushed
1235 # - and remote is publishing
1239 # - and remote is publishing
1236 # We may be in issue 3871 case!
1240 # We may be in issue 3871 case!
1237 # We drop the possible phase synchronisation done by
1241 # We drop the possible phase synchronisation done by
1238 # courtesy to publish changesets possibly locally draft
1242 # courtesy to publish changesets possibly locally draft
1239 # on the remote.
1243 # on the remote.
1240 remotephases = {b'publishing': b'True'}
1244 remotephases = {b'publishing': b'True'}
1241 if not remotephases: # old server or public only reply from non-publishing
1245 if not remotephases: # old server or public only reply from non-publishing
1242 _localphasemove(pushop, cheads)
1246 _localphasemove(pushop, cheads)
1243 # don't push any phase data as there is nothing to push
1247 # don't push any phase data as there is nothing to push
1244 else:
1248 else:
1245 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1249 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1246 pheads, droots = ana
1250 pheads, droots = ana
1247 ### Apply remote phase on local
1251 ### Apply remote phase on local
1248 if remotephases.get(b'publishing', False):
1252 if remotephases.get(b'publishing', False):
1249 _localphasemove(pushop, cheads)
1253 _localphasemove(pushop, cheads)
1250 else: # publish = False
1254 else: # publish = False
1251 _localphasemove(pushop, pheads)
1255 _localphasemove(pushop, pheads)
1252 _localphasemove(pushop, cheads, phases.draft)
1256 _localphasemove(pushop, cheads, phases.draft)
1253 ### Apply local phase on remote
1257 ### Apply local phase on remote
1254
1258
1255 if pushop.cgresult:
1259 if pushop.cgresult:
1256 if b'phases' in pushop.stepsdone:
1260 if b'phases' in pushop.stepsdone:
1257 # phases already pushed though bundle2
1261 # phases already pushed though bundle2
1258 return
1262 return
1259 outdated = pushop.outdatedphases
1263 outdated = pushop.outdatedphases
1260 else:
1264 else:
1261 outdated = pushop.fallbackoutdatedphases
1265 outdated = pushop.fallbackoutdatedphases
1262
1266
1263 pushop.stepsdone.add(b'phases')
1267 pushop.stepsdone.add(b'phases')
1264
1268
1265 # filter heads already turned public by the push
1269 # filter heads already turned public by the push
1266 outdated = [c for c in outdated if c.node() not in pheads]
1270 outdated = [c for c in outdated if c.node() not in pheads]
1267 # fallback to independent pushkey command
1271 # fallback to independent pushkey command
1268 for newremotehead in outdated:
1272 for newremotehead in outdated:
1269 with pushop.remote.commandexecutor() as e:
1273 with pushop.remote.commandexecutor() as e:
1270 r = e.callcommand(
1274 r = e.callcommand(
1271 b'pushkey',
1275 b'pushkey',
1272 {
1276 {
1273 b'namespace': b'phases',
1277 b'namespace': b'phases',
1274 b'key': newremotehead.hex(),
1278 b'key': newremotehead.hex(),
1275 b'old': b'%d' % phases.draft,
1279 b'old': b'%d' % phases.draft,
1276 b'new': b'%d' % phases.public,
1280 b'new': b'%d' % phases.public,
1277 },
1281 },
1278 ).result()
1282 ).result()
1279
1283
1280 if not r:
1284 if not r:
1281 pushop.ui.warn(
1285 pushop.ui.warn(
1282 _(b'updating %s to public failed!\n') % newremotehead
1286 _(b'updating %s to public failed!\n') % newremotehead
1283 )
1287 )
1284
1288
1285
1289
1286 def _localphasemove(pushop, nodes, phase=phases.public):
1290 def _localphasemove(pushop, nodes, phase=phases.public):
1287 """move <nodes> to <phase> in the local source repo"""
1291 """move <nodes> to <phase> in the local source repo"""
1288 if pushop.trmanager:
1292 if pushop.trmanager:
1289 phases.advanceboundary(
1293 phases.advanceboundary(
1290 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1294 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1291 )
1295 )
1292 else:
1296 else:
1293 # repo is not locked, do not change any phases!
1297 # repo is not locked, do not change any phases!
1294 # Informs the user that phases should have been moved when
1298 # Informs the user that phases should have been moved when
1295 # applicable.
1299 # applicable.
1296 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1300 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1297 phasestr = phases.phasenames[phase]
1301 phasestr = phases.phasenames[phase]
1298 if actualmoves:
1302 if actualmoves:
1299 pushop.ui.status(
1303 pushop.ui.status(
1300 _(
1304 _(
1301 b'cannot lock source repo, skipping '
1305 b'cannot lock source repo, skipping '
1302 b'local %s phase update\n'
1306 b'local %s phase update\n'
1303 )
1307 )
1304 % phasestr
1308 % phasestr
1305 )
1309 )
1306
1310
1307
1311
1308 def _pushobsolete(pushop):
1312 def _pushobsolete(pushop):
1309 """utility function to push obsolete markers to a remote"""
1313 """utility function to push obsolete markers to a remote"""
1310 if b'obsmarkers' in pushop.stepsdone:
1314 if b'obsmarkers' in pushop.stepsdone:
1311 return
1315 return
1312 repo = pushop.repo
1316 repo = pushop.repo
1313 remote = pushop.remote
1317 remote = pushop.remote
1314 pushop.stepsdone.add(b'obsmarkers')
1318 pushop.stepsdone.add(b'obsmarkers')
1315 if pushop.outobsmarkers:
1319 if pushop.outobsmarkers:
1316 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1320 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1317 rslts = []
1321 rslts = []
1318 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1322 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1319 remotedata = obsolete._pushkeyescape(markers)
1323 remotedata = obsolete._pushkeyescape(markers)
1320 for key in sorted(remotedata, reverse=True):
1324 for key in sorted(remotedata, reverse=True):
1321 # reverse sort to ensure we end with dump0
1325 # reverse sort to ensure we end with dump0
1322 data = remotedata[key]
1326 data = remotedata[key]
1323 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1327 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1324 if [r for r in rslts if not r]:
1328 if [r for r in rslts if not r]:
1325 msg = _(b'failed to push some obsolete markers!\n')
1329 msg = _(b'failed to push some obsolete markers!\n')
1326 repo.ui.warn(msg)
1330 repo.ui.warn(msg)
1327
1331
1328
1332
1329 def _pushbookmark(pushop):
1333 def _pushbookmark(pushop):
1330 """Update bookmark position on remote"""
1334 """Update bookmark position on remote"""
1331 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1335 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1332 return
1336 return
1333 pushop.stepsdone.add(b'bookmarks')
1337 pushop.stepsdone.add(b'bookmarks')
1334 ui = pushop.ui
1338 ui = pushop.ui
1335 remote = pushop.remote
1339 remote = pushop.remote
1336
1340
1337 for b, old, new in pushop.outbookmarks:
1341 for b, old, new in pushop.outbookmarks:
1338 action = b'update'
1342 action = b'update'
1339 if not old:
1343 if not old:
1340 action = b'export'
1344 action = b'export'
1341 elif not new:
1345 elif not new:
1342 action = b'delete'
1346 action = b'delete'
1343
1347
1344 with remote.commandexecutor() as e:
1348 with remote.commandexecutor() as e:
1345 r = e.callcommand(
1349 r = e.callcommand(
1346 b'pushkey',
1350 b'pushkey',
1347 {
1351 {
1348 b'namespace': b'bookmarks',
1352 b'namespace': b'bookmarks',
1349 b'key': b,
1353 b'key': b,
1350 b'old': hex(old),
1354 b'old': hex(old),
1351 b'new': hex(new),
1355 b'new': hex(new),
1352 },
1356 },
1353 ).result()
1357 ).result()
1354
1358
1355 if r:
1359 if r:
1356 ui.status(bookmsgmap[action][0] % b)
1360 ui.status(bookmsgmap[action][0] % b)
1357 else:
1361 else:
1358 ui.warn(bookmsgmap[action][1] % b)
1362 ui.warn(bookmsgmap[action][1] % b)
1359 # discovery can have set the value form invalid entry
1363 # discovery can have set the value form invalid entry
1360 if pushop.bkresult is not None:
1364 if pushop.bkresult is not None:
1361 pushop.bkresult = 1
1365 pushop.bkresult = 1
1362
1366
1363
1367
1364 class pulloperation(object):
1368 class pulloperation(object):
1365 """A object that represent a single pull operation
1369 """A object that represent a single pull operation
1366
1370
1367 It purpose is to carry pull related state and very common operation.
1371 It purpose is to carry pull related state and very common operation.
1368
1372
1369 A new should be created at the beginning of each pull and discarded
1373 A new should be created at the beginning of each pull and discarded
1370 afterward.
1374 afterward.
1371 """
1375 """
1372
1376
1373 def __init__(
1377 def __init__(
1374 self,
1378 self,
1375 repo,
1379 repo,
1376 remote,
1380 remote,
1377 heads=None,
1381 heads=None,
1378 force=False,
1382 force=False,
1379 bookmarks=(),
1383 bookmarks=(),
1380 remotebookmarks=None,
1384 remotebookmarks=None,
1381 streamclonerequested=None,
1385 streamclonerequested=None,
1382 includepats=None,
1386 includepats=None,
1383 excludepats=None,
1387 excludepats=None,
1384 depth=None,
1388 depth=None,
1385 ):
1389 ):
1386 # repo we pull into
1390 # repo we pull into
1387 self.repo = repo
1391 self.repo = repo
1388 # repo we pull from
1392 # repo we pull from
1389 self.remote = remote
1393 self.remote = remote
1390 # revision we try to pull (None is "all")
1394 # revision we try to pull (None is "all")
1391 self.heads = heads
1395 self.heads = heads
1392 # bookmark pulled explicitly
1396 # bookmark pulled explicitly
1393 self.explicitbookmarks = [
1397 self.explicitbookmarks = [
1394 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1398 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1395 ]
1399 ]
1396 # do we force pull?
1400 # do we force pull?
1397 self.force = force
1401 self.force = force
1398 # whether a streaming clone was requested
1402 # whether a streaming clone was requested
1399 self.streamclonerequested = streamclonerequested
1403 self.streamclonerequested = streamclonerequested
1400 # transaction manager
1404 # transaction manager
1401 self.trmanager = None
1405 self.trmanager = None
1402 # set of common changeset between local and remote before pull
1406 # set of common changeset between local and remote before pull
1403 self.common = None
1407 self.common = None
1404 # set of pulled head
1408 # set of pulled head
1405 self.rheads = None
1409 self.rheads = None
1406 # list of missing changeset to fetch remotely
1410 # list of missing changeset to fetch remotely
1407 self.fetch = None
1411 self.fetch = None
1408 # remote bookmarks data
1412 # remote bookmarks data
1409 self.remotebookmarks = remotebookmarks
1413 self.remotebookmarks = remotebookmarks
1410 # result of changegroup pulling (used as return code by pull)
1414 # result of changegroup pulling (used as return code by pull)
1411 self.cgresult = None
1415 self.cgresult = None
1412 # list of step already done
1416 # list of step already done
1413 self.stepsdone = set()
1417 self.stepsdone = set()
1414 # Whether we attempted a clone from pre-generated bundles.
1418 # Whether we attempted a clone from pre-generated bundles.
1415 self.clonebundleattempted = False
1419 self.clonebundleattempted = False
1416 # Set of file patterns to include.
1420 # Set of file patterns to include.
1417 self.includepats = includepats
1421 self.includepats = includepats
1418 # Set of file patterns to exclude.
1422 # Set of file patterns to exclude.
1419 self.excludepats = excludepats
1423 self.excludepats = excludepats
1420 # Number of ancestor changesets to pull from each pulled head.
1424 # Number of ancestor changesets to pull from each pulled head.
1421 self.depth = depth
1425 self.depth = depth
1422
1426
1423 @util.propertycache
1427 @util.propertycache
1424 def pulledsubset(self):
1428 def pulledsubset(self):
1425 """heads of the set of changeset target by the pull"""
1429 """heads of the set of changeset target by the pull"""
1426 # compute target subset
1430 # compute target subset
1427 if self.heads is None:
1431 if self.heads is None:
1428 # We pulled every thing possible
1432 # We pulled every thing possible
1429 # sync on everything common
1433 # sync on everything common
1430 c = set(self.common)
1434 c = set(self.common)
1431 ret = list(self.common)
1435 ret = list(self.common)
1432 for n in self.rheads:
1436 for n in self.rheads:
1433 if n not in c:
1437 if n not in c:
1434 ret.append(n)
1438 ret.append(n)
1435 return ret
1439 return ret
1436 else:
1440 else:
1437 # We pulled a specific subset
1441 # We pulled a specific subset
1438 # sync on this subset
1442 # sync on this subset
1439 return self.heads
1443 return self.heads
1440
1444
1441 @util.propertycache
1445 @util.propertycache
1442 def canusebundle2(self):
1446 def canusebundle2(self):
1443 return not _forcebundle1(self)
1447 return not _forcebundle1(self)
1444
1448
1445 @util.propertycache
1449 @util.propertycache
1446 def remotebundle2caps(self):
1450 def remotebundle2caps(self):
1447 return bundle2.bundle2caps(self.remote)
1451 return bundle2.bundle2caps(self.remote)
1448
1452
1449 def gettransaction(self):
1453 def gettransaction(self):
1450 # deprecated; talk to trmanager directly
1454 # deprecated; talk to trmanager directly
1451 return self.trmanager.transaction()
1455 return self.trmanager.transaction()
1452
1456
1453
1457
1454 class transactionmanager(util.transactional):
1458 class transactionmanager(util.transactional):
1455 """An object to manage the life cycle of a transaction
1459 """An object to manage the life cycle of a transaction
1456
1460
1457 It creates the transaction on demand and calls the appropriate hooks when
1461 It creates the transaction on demand and calls the appropriate hooks when
1458 closing the transaction."""
1462 closing the transaction."""
1459
1463
1460 def __init__(self, repo, source, url):
1464 def __init__(self, repo, source, url):
1461 self.repo = repo
1465 self.repo = repo
1462 self.source = source
1466 self.source = source
1463 self.url = url
1467 self.url = url
1464 self._tr = None
1468 self._tr = None
1465
1469
1466 def transaction(self):
1470 def transaction(self):
1467 """Return an open transaction object, constructing if necessary"""
1471 """Return an open transaction object, constructing if necessary"""
1468 if not self._tr:
1472 if not self._tr:
1469 trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url))
1473 trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url))
1470 self._tr = self.repo.transaction(trname)
1474 self._tr = self.repo.transaction(trname)
1471 self._tr.hookargs[b'source'] = self.source
1475 self._tr.hookargs[b'source'] = self.source
1472 self._tr.hookargs[b'url'] = self.url
1476 self._tr.hookargs[b'url'] = self.url
1473 return self._tr
1477 return self._tr
1474
1478
1475 def close(self):
1479 def close(self):
1476 """close transaction if created"""
1480 """close transaction if created"""
1477 if self._tr is not None:
1481 if self._tr is not None:
1478 self._tr.close()
1482 self._tr.close()
1479
1483
1480 def release(self):
1484 def release(self):
1481 """release transaction if created"""
1485 """release transaction if created"""
1482 if self._tr is not None:
1486 if self._tr is not None:
1483 self._tr.release()
1487 self._tr.release()
1484
1488
1485
1489
1486 def listkeys(remote, namespace):
1490 def listkeys(remote, namespace):
1487 with remote.commandexecutor() as e:
1491 with remote.commandexecutor() as e:
1488 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1492 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1489
1493
1490
1494
1491 def _fullpullbundle2(repo, pullop):
1495 def _fullpullbundle2(repo, pullop):
1492 # The server may send a partial reply, i.e. when inlining
1496 # The server may send a partial reply, i.e. when inlining
1493 # pre-computed bundles. In that case, update the common
1497 # pre-computed bundles. In that case, update the common
1494 # set based on the results and pull another bundle.
1498 # set based on the results and pull another bundle.
1495 #
1499 #
1496 # There are two indicators that the process is finished:
1500 # There are two indicators that the process is finished:
1497 # - no changeset has been added, or
1501 # - no changeset has been added, or
1498 # - all remote heads are known locally.
1502 # - all remote heads are known locally.
1499 # The head check must use the unfiltered view as obsoletion
1503 # The head check must use the unfiltered view as obsoletion
1500 # markers can hide heads.
1504 # markers can hide heads.
1501 unfi = repo.unfiltered()
1505 unfi = repo.unfiltered()
1502 unficl = unfi.changelog
1506 unficl = unfi.changelog
1503
1507
1504 def headsofdiff(h1, h2):
1508 def headsofdiff(h1, h2):
1505 """Returns heads(h1 % h2)"""
1509 """Returns heads(h1 % h2)"""
1506 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1510 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1507 return {ctx.node() for ctx in res}
1511 return {ctx.node() for ctx in res}
1508
1512
1509 def headsofunion(h1, h2):
1513 def headsofunion(h1, h2):
1510 """Returns heads((h1 + h2) - null)"""
1514 """Returns heads((h1 + h2) - null)"""
1511 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1515 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1512 return {ctx.node() for ctx in res}
1516 return {ctx.node() for ctx in res}
1513
1517
1514 while True:
1518 while True:
1515 old_heads = unficl.heads()
1519 old_heads = unficl.heads()
1516 clstart = len(unficl)
1520 clstart = len(unficl)
1517 _pullbundle2(pullop)
1521 _pullbundle2(pullop)
1518 if requirements.NARROW_REQUIREMENT in repo.requirements:
1522 if requirements.NARROW_REQUIREMENT in repo.requirements:
1519 # XXX narrow clones filter the heads on the server side during
1523 # XXX narrow clones filter the heads on the server side during
1520 # XXX getbundle and result in partial replies as well.
1524 # XXX getbundle and result in partial replies as well.
1521 # XXX Disable pull bundles in this case as band aid to avoid
1525 # XXX Disable pull bundles in this case as band aid to avoid
1522 # XXX extra round trips.
1526 # XXX extra round trips.
1523 break
1527 break
1524 if clstart == len(unficl):
1528 if clstart == len(unficl):
1525 break
1529 break
1526 if all(unficl.hasnode(n) for n in pullop.rheads):
1530 if all(unficl.hasnode(n) for n in pullop.rheads):
1527 break
1531 break
1528 new_heads = headsofdiff(unficl.heads(), old_heads)
1532 new_heads = headsofdiff(unficl.heads(), old_heads)
1529 pullop.common = headsofunion(new_heads, pullop.common)
1533 pullop.common = headsofunion(new_heads, pullop.common)
1530 pullop.rheads = set(pullop.rheads) - pullop.common
1534 pullop.rheads = set(pullop.rheads) - pullop.common
1531
1535
1532
1536
def add_confirm_callback(repo, pullop):
    """Register a transaction validator that summarizes the incoming
    changes and asks the user to confirm the pull before the transaction
    is committed."""
    unfi_ref = weakref.ref(repo.unfiltered())
    txn = pullop.trmanager.transaction()
    # show pull stats to the user when the transaction is validated
    scmutil.registersummarycallback(
        repo, txn, txnname=b'pull', as_validator=True
    )

    def _ask_user(tr):
        # resolve the weakref late so we do not keep the repo alive
        local_repo = unfi_ref()
        choice = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
        if local_repo.ui.promptchoice(choice):
            raise error.Abort(b"user aborted")

    txn.addvalidator(b'900-pull-prompt', _ask_user)
1550
1554
1551
1555
def pull(
    repo,
    remote,
    heads=None,
    force=False,
    bookmarks=(),
    opargs=None,
    streamclonerequested=None,
    includepats=None,
    excludepats=None,
    depth=None,
    confirm=None,
):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.
    ``includepats`` and ``excludepats`` define explicit file patterns to
    include and exclude in storage, respectively. If not defined, narrow
    patterns from the repo instance are used, if available.
    ``depth`` is an integer indicating the DAG depth of history we're
    interested in. If defined, for each revision specified in ``heads``, we
    will fetch up to this many of its ancestors and data associated with them.
    ``confirm`` is a boolean indicating whether the pull should be confirmed
    before committing the transaction. This overrides HGPLAIN.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}

    # We allow the narrow patterns to be passed in explicitly to provide more
    # flexibility for API consumers.
    if includepats or excludepats:
        includepats = includepats or set()
        excludepats = excludepats or set()
    else:
        includepats, excludepats = repo.narrowpats

    narrowspec.validatepatterns(includepats)
    narrowspec.validatepatterns(excludepats)

    pullop = pulloperation(
        repo,
        remote,
        heads,
        force,
        bookmarks=bookmarks,
        streamclonerequested=streamclonerequested,
        includepats=includepats,
        excludepats=excludepats,
        depth=depth,
        **pycompat.strkwargs(opargs)
    )

    # Refuse to pull from a local peer whose requirements we cannot read.
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    for category in repo._wanted_sidedata:
        # Check that a computer is registered for that category for at least
        # one revlog kind.  (Only the values are needed; the revlog kind
        # itself does not matter here.)
        for computers in repo._sidedata_computers.values():
            if computers.get(category):
                break
        else:
            # This should never happen since repos are supposed to be able to
            # generate the sidedata they require.
            raise error.ProgrammingError(
                _(
                    b'sidedata category requested by local side without local'
                    b"support: '%s'"
                )
                % pycompat.bytestr(category)
            )

    pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
    wlock = util.nullcontextmanager()
    # bookmarks stored outside the store also need the working-copy lock
    if not bookmod.bookmarksinstore(repo):
        wlock = repo.wlock()
    with wlock, repo.lock(), pullop.trmanager:
        if confirm or (
            repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
        ):
            add_confirm_callback(repo, pullop)

        # Use the modern wire protocol, if available.
        if remote.capable(b'command-changesetdata'):
            exchangev2.pull(pullop)
        else:
            # This should ideally be in _pullbundle2(). However, it needs to run
            # before discovery to avoid extra work.
            _maybeapplyclonebundle(pullop)
            streamclone.maybeperformlegacystreamclone(pullop)
            _pulldiscovery(pullop)
            if pullop.canusebundle2:
                _fullpullbundle2(repo, pullop)
            # each step below is a no-op if bundle2 already handled it
            _pullchangeset(pullop)
            _pullphase(pullop)
            _pullbookmarks(pullop)
            _pullobsolete(pullop)

        # storing remotenames
        if repo.ui.configbool(b'experimental', b'remotenames'):
            logexchange.pullremotenames(repo, remote)

    return pullop
1679
1683
1680
1684
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}


def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The decorated function is recorded under ``stepname`` in the step
    mapping and ``stepname`` is appended to the ordered step list, so
    registration order is significant.

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""

    def register(func):
        # a step name must be registered at most once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func

    return register
1707
1711
1708
1712
def _pulldiscovery(pullop):
    """Execute every registered pull-discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1714
1718
1715
1719
@pulldiscovery(b'b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """Fetch bookmark data early when the exchange will use bundle1.

    Without bundle2, bookmarks must be retrieved before changeset
    discovery so the window for race conditions (and their impact)
    stays small."""
    already_known = pullop.remotebookmarks is not None
    # all known bundle2 servers support listkeys, so a bundle2-capable
    # peer will provide bookmark data through the bundle instead
    bundle2_path = (
        pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps
    )
    if already_known or bundle2_path:
        return
    raw = listkeys(pullop.remote, b'bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1730
1734
1731
1735
@pulldiscovery(b'changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; at some point this will
    change to handle all discovery.  Results are stored on ``pullop``
    (``common``, ``fetch`` and ``rheads``).
    """
    tmp = discovery.findcommonincoming(
        pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
    )
    common, fetch, rheads = tmp
    # node presence is checked against the *unfiltered* changelog on purpose
    has_node = pullop.repo.unfiltered().changelog.index.has_node
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situations.  We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it,
        scommon = set(common)
        for n in rheads:
            if has_node(n):
                if n not in scommon:
                    common.append(n)
        # nothing left to fetch when every remote head is already common
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1763
1767
1764
1768
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup.

    Builds the ``getbundle`` argument dict (``kwargs``) by negotiating
    capabilities with the remote, issues a single ``getbundle`` call and
    processes the returned bundle.  Every kind of data handled here is
    recorded in ``pullop.stepsdone`` so that the legacy fallback steps
    become no-ops afterwards.
    """
    kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs[b'common'] = pullop.common
    kwargs[b'heads'] = pullop.heads or pullop.rheads

    # check server supports narrow and then adding includepats and excludepats
    servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
    if servernarrow and pullop.includepats:
        kwargs[b'includepats'] = pullop.includepats
    if servernarrow and pullop.excludepats:
        kwargs[b'excludepats'] = pullop.excludepats

    if streaming:
        # a stream clone replaces both the changegroup and phase data
        kwargs[b'cg'] = False
        kwargs[b'stream'] = True
        pullop.stepsdone.add(b'changegroup')
        pullop.stepsdone.add(b'phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add(b'changegroup')

        kwargs[b'cg'] = pullop.fetch

        # prefer the binary phases part unless legacy exchange is forced
        legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
        hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
        if not legacyphase and hasbinaryphase:
            kwargs[b'phases'] = True
            pullop.stepsdone.add(b'phases')

        if b'listkeys' in pullop.remotebundle2caps:
            if b'phases' not in pullop.stepsdone:
                # fall back to pushkey-style phase data
                kwargs[b'listkeys'] = [b'phases']

    bookmarksrequested = False
    legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
    hasbinarybook = b'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add(b'request-bookmarks')

    if (
        b'request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark
        and hasbinarybook
    ):
        kwargs[b'bookmarks'] = True
        bookmarksrequested = True

    if b'listkeys' in pullop.remotebundle2caps:
        if b'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add(b'request-bookmarks')
            kwargs.setdefault(b'listkeys', []).append(b'bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (
        pullop.remote.capable(b'clonebundles')
        and pullop.heads is None
        and list(pullop.common) == [pullop.repo.nullid]
    ):
        kwargs[b'cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_(b'streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [pullop.repo.nullid]:
            pullop.repo.ui.status(_(b"requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs[b'obsmarkers'] = True
            pullop.stepsdone.add(b'obsmarkers')
    # hook point for extensions to amend the getbundle arguments
    _pullbundle2extraprepare(pullop, kwargs)

    remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote)
    if remote_sidedata:
        kwargs[b'remote_sidedata'] = remote_sidedata

    with pullop.remote.commandexecutor() as e:
        args = dict(kwargs)
        args[b'source'] = b'pull'
        bundle = e.callcommand(b'getbundle', args).result()

    try:
        op = bundle2.bundleoperation(
            pullop.repo, pullop.gettransaction, source=b'pull'
        )
        # collect bookmark parts as records instead of applying them directly
        op.modes[b'bookmarks'] = b'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        # the remote aborted mid-bundle; surface its message then re-raise
        pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc)
        raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.RemoteError(_(b'missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records[b'listkeys']:
        if namespace == b'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records[b'bookmarks']:
            books[record[b'bookmark']] = record[b"node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records[b'listkeys']:
            if namespace == b'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1904
1908
1905
1909
1906 def _pullbundle2extraprepare(pullop, kwargs):
1910 def _pullbundle2extraprepare(pullop, kwargs):
1907 """hook function so that extensions can extend the getbundle call"""
1911 """hook function so that extensions can extend the getbundle call"""
1908
1912
1909
1913
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Picks the richest wire command the remote offers: ``getbundle`` when
    available, otherwise ``changegroup`` (full pull) or
    ``changegroupsubset`` (partial pull).  Skipped entirely when bundle2
    already handled the changegroup step.
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if b'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [pullop.repo.nullid]:
        pullop.repo.ui.status(_(b"requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable(b'getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle(
            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
        )
    elif pullop.heads is None:
        # full pull over the legacy protocol
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroup',
                {
                    b'nodes': pullop.fetch,
                    b'source': b'pull',
                },
            ).result()

    elif not pullop.remote.capable(b'changegroupsubset'):
        raise error.Abort(
            _(
                b"partial pull cannot be done because "
                b"other repository doesn't support "
                b"changegroupsubset."
            )
        )
    else:
        # partial pull over the legacy protocol
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroupsubset',
                {
                    b'bases': pullop.fetch,
                    b'heads': pullop.heads,
                    b'source': b'pull',
                },
            ).result()

    # apply the received data inside the pull transaction
    bundleop = bundle2.applybundle(
        pullop.repo, cg, tr, b'pull', pullop.remote.url()
    )
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1967
1971
1968
1972
def _pullphase(pullop):
    """Fetch phase data via the legacy listkeys protocol and apply it.

    A no-op when the phases step was already completed (e.g. by bundle2)."""
    if b'phases' not in pullop.stepsdone:
        phase_data = listkeys(pullop.remote, b'phases')
        _pullapplyphases(pullop, phase_data)
1975
1979
1976
1980
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the pushkey-style mapping received from the
    remote.  This function only ever calls ``phases.advanceboundary``,
    so local phases can only move forward here, never be retracted.
    """
    if b'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'phases')
    publishing = bool(remotephases.get(b'publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(
            pullop.repo, pullop.pulledsubset, remotephases
        )
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.index.get_rev
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        # transaction is opened lazily, only if there is work to do
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
2011
2015
2012
2016
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if b'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'bookmarks')
    repo = pullop.repo
    bookmod.updatefromremote(
        repo.ui,
        repo,
        pullop.remotebookmarks,
        pullop.remote.url(),
        pullop.gettransaction,
        explicit=pullop.explicitbookmarks,
    )
2028
2032
2029
2033
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    ``pullop.gettransaction`` returns the pull transaction, creating one if
    necessary.  The transaction (or None) is returned so the caller knows
    whether a new transaction was opened.

    Exists mostly to allow overriding for experimentation purposes."""
    if b'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
        remoteobs = listkeys(pullop.remote, b'obsolete')
        # Presence of the first dump key signals the remote has markers.
        if b'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for dumpkey in sorted(remoteobs, reverse=True):
                if not dumpkey.startswith(b'dump'):
                    continue
                decoded = util.b85decode(remoteobs[dumpkey])
                version, newmarks = obsolete._readmarkers(decoded)
                markers.extend(newmarks)
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
2057
2061
2058
2062
def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    This massages the named arguments for getbundle wire protocol commands
    so requested data is filtered through access control rules.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
    acl_includes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.includes',
        ui.configlist(_NARROWACL_SECTION, b'default.includes'),
    )
    acl_excludes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.excludes',
        ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
    )
    if not acl_includes:
        raise error.Abort(
            _(b"%s configuration for user %s is empty")
            % (_NARROWACL_SECTION, username)
        )

    # A bare '*' means "the whole repository"; anything else is treated as
    # a plain path prefix.
    acl_includes = [
        b'path:.' if pat == b'*' else b'path:' + pat for pat in acl_includes
    ]
    acl_excludes = [
        b'path:.' if pat == b'*' else b'path:' + pat for pat in acl_excludes
    ]

    req_includes = set(kwargs.get('includepats', []))
    req_excludes = set(kwargs.get('excludepats', []))

    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, acl_includes, acl_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _(b"The following includes are not accessible for %s: %s")
            % (username, stringutil.pprint(invalid_includes))
        )

    # Rebuild the argument dict with the narrowed patterns applied.
    new_args = dict(kwargs)
    new_args['narrow'] = True
    new_args['narrow_acl'] = True
    new_args['includepats'] = req_includes
    if req_excludes:
        new_args['excludepats'] = req_excludes

    return new_args
2113
2117
2114
2118
def _computeellipsis(repo, common, heads, known, match, depth=None):
    """Compute the shape of a narrowed DAG.

    Args:
      repo: The repository we're transferring.
      common: The roots of the DAG range we're transferring.
          May be just [nullid], which means all ancestors of heads.
      heads: The heads of the DAG range we're transferring.
      match: The narrowmatcher that allows us to identify relevant changes.
      depth: If not None, only consider nodes to be full nodes if they are at
          most depth changesets away from one of heads.

    Returns:
      A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:

        visitnodes: The list of nodes (either full or ellipsis) which
            need to be sent to the client.
        relevant_nodes: The set of changelog nodes which change a file inside
            the narrowspec. The client needs these as non-ellipsis nodes.
        ellipsisroots: A dict of {rev: parents} that is used in
            narrowchangegroup to produce ellipsis nodes with the
            correct parents.
    """
    cl = repo.changelog
    mfl = repo.manifestlog

    clrev = cl.rev

    commonrevs = {clrev(n) for n in common} | {nullrev}
    headsrevs = {clrev(n) for n in heads}

    if depth:
        revdepth = {h: 0 for h in headsrevs}

    ellipsisheads = collections.defaultdict(set)
    ellipsisroots = collections.defaultdict(set)

    def addroot(head, curchange):
        """Add a root to an ellipsis head, splitting heads with 3 roots."""
        ellipsisroots[head].add(curchange)
        # Recursively split ellipsis heads with 3 roots by finding the
        # roots' youngest common descendant which is an elided merge commit.
        # That descendant takes 2 of the 3 roots as its own, and becomes a
        # root of the head.
        while len(ellipsisroots[head]) > 2:
            child, roots = splithead(head)
            splitroots(head, child, roots)
            head = child  # Recurse in case we just added a 3rd root

    def splitroots(head, child, roots):
        # Move ``roots`` from ``head`` onto the intermediate ``child``.
        ellipsisroots[head].difference_update(roots)
        ellipsisroots[head].add(child)
        ellipsisroots[child].update(roots)
        ellipsisroots[child].discard(child)

    def splithead(head):
        r1, r2, r3 = sorted(ellipsisroots[head])
        for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
            merges = repo.revs(
                b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
            )
            for cand in merges:
                if cand == nr2:
                    return nr2, (nr1, nr2)
                if cand not in ellipsisroots or len(ellipsisroots[cand]) < 2:
                    return cand, (nr1, nr2)
        raise error.Abort(
            _(
                b'Failed to split up ellipsis node! head: %d, '
                b'roots: %d %d %d'
            )
            % (head, r1, r2, r3)
        )

    missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
    relevant_nodes = set()
    visitnodes = [cl.node(m) for m in missing]
    required = set(headsrevs) | known
    # Walk from the tip end of the missing range towards the roots.
    for rev in reversed(missing):
        clrev = cl.changelogrevision(rev)
        parents = [p for p in cl.parentrevs(rev) if p != nullrev]
        if depth is not None:
            basedepth = revdepth[rev]
            for parent in parents:
                revdepth[parent] = min(
                    basedepth + 1, revdepth.get(parent, depth + 1)
                )
        needed = False
        shallow_enough = depth is None or revdepth[rev] <= depth
        if shallow_enough:
            curmf = mfl[clrev.manifest].read()
            if parents:
                # We choose to not trust the changed files list in
                # changesets because it's not always correct. TODO: could
                # we trust it for the non-merge case?
                p1mf = mfl[cl.changelogrevision(parents[0]).manifest].read()
                needed = bool(curmf.diff(p1mf, match))
                if not needed and len(parents) > 1:
                    # For merge changes, the list of changed files is not
                    # helpful, since we need to emit the merge if a file
                    # in the narrow spec has changed on either side of the
                    # merge. As a result, we do a manifest diff to check.
                    p2mf = mfl[
                        cl.changelogrevision(parents[1]).manifest
                    ].read()
                    needed = bool(curmf.diff(p2mf, match))
            else:
                # For a root node, we need to include the node if any
                # files in the node match the narrowspec.
                needed = any(curmf.walk(match))

        if needed:
            for head in ellipsisheads[rev]:
                addroot(head, rev)
            for parent in parents:
                required.add(parent)
            relevant_nodes.add(cl.node(rev))
        else:
            if not parents:
                parents = [nullrev]
            if rev in required:
                for head in ellipsisheads[rev]:
                    addroot(head, rev)
                for parent in parents:
                    ellipsisheads[parent].add(rev)
            else:
                for parent in parents:
                    ellipsisheads[parent] |= ellipsisheads[rev]

    # add common changesets as roots of their reachable ellipsis heads
    for c in commonrevs:
        for head in ellipsisheads[c]:
            addroot(head, c)
    return visitnodes, relevant_nodes, ellipsisroots
2246
2250
2247
2251
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # Advertise both the HG20 marker and our encoded bundle2 capabilities.
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {b'HG20', b'bundle2=' + urlreq.quote(capsblob)}
2254
2258
2255
2259
# Ordered list of step names used to assemble a bundle2 reply for getbundle;
# the order of this list is significant.
getbundle2partsorder = []

# Mapping between step name and the function implementing that step.
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
2263
2267
2264
2268
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""

    def register(partfn):
        # Each step name may be registered exactly once.
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = partfn
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return partfn

    return register
2285
2289
2286
2290
def bundle2requested(bundlecaps):
    """Return whether the client capabilities request a bundle2 stream."""
    if bundlecaps is None:
        return False
    # Any capability token starting with b'HG2' marks a bundle2 client.
    for cap in bundlecaps:
        if cap.startswith(b'HG2'):
            return True
    return False
2291
2295
2292
2296
def getbundlechunks(
    repo,
    source,
    heads=None,
    common=None,
    bundlecaps=None,
    remote_sidedata=None,
    **kwargs
):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}

    if not bundle2requested(bundlecaps):
        # bundle10 (legacy) case: only a bare changegroup can be emitted.
        if bundlecaps and not kwargs.get(b'cg', True):
            raise ValueError(
                _(b'request for bundle10 must include changegroup')
            )

        if kwargs:
            raise ValueError(
                _(b'unsupported getbundle arguments: %s')
                % b', '.join(sorted(kwargs.keys()))
            )
        outgoing = _computeoutgoing(repo, heads, common)
        info[b'bundleversion'] = 1
        stream = changegroup.makestream(
            repo,
            outgoing,
            b'01',
            source,
            bundlecaps=bundlecaps,
            remote_sidedata=remote_sidedata,
        )
        return info, stream

    # bundle20 case
    info[b'bundleversion'] = 2
    b2caps = {}
    for bcaps in bundlecaps:
        # Decode the client's advertised bundle2 capabilities blob.
        if bcaps.startswith(b'bundle2='):
            blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs[b'heads'] = heads
    kwargs[b'common'] = common

    # Run every registered part generator, in the declared order.
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(
            bundler,
            repo,
            source,
            bundlecaps=bundlecaps,
            b2caps=b2caps,
            remote_sidedata=remote_sidedata,
            **pycompat.strkwargs(kwargs)
        )

    info[b'prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
2366
2370
2367
2371
@getbundle2partsgenerator(b'stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """Delegate generation of the stream2 part to the bundle2 module."""
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2371
2375
2372
2376
@getbundle2partsgenerator(b'changegroup')
def _getbundlechangegrouppart(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    remote_sidedata=None,
    **kwargs
):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True) or not b2caps:
        return

    # Negotiate the changegroup version with the client.
    version = b'01'
    clientversions = b2caps.get(b'changegroup')
    if clientversions:  # 3.1 and 3.2 ship with an empty value
        clientversions = [
            v
            for v in clientversions
            if v in changegroup.supportedoutgoingversions(repo)
        ]
        if not clientversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(clientversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        return

    if kwargs.get('narrow', False):
        include = sorted(filter(bool, kwargs.get('includepats', [])))
        exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
        narrowmatcher = narrowspec.match(
            repo.root, include=include, exclude=exclude
        )
    else:
        narrowmatcher = None

    stream = changegroup.makestream(
        repo,
        outgoing,
        version,
        source,
        bundlecaps=bundlecaps,
        matcher=narrowmatcher,
        remote_sidedata=remote_sidedata,
    )

    cgpart = bundler.newpart(b'changegroup', data=stream)
    if clientversions:
        cgpart.addparam(b'version', version)

    cgpart.addparam(
        b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False
    )

    if scmutil.istreemanifest(repo):
        cgpart.addparam(b'treemanifest', b'1')

    if repository.REPO_FEATURE_SIDE_DATA in repo.features:
        cgpart.addparam(b'exp-sidedata', b'1')
        sidedata = bundle2.format_remote_wanted_sidedata(repo)
        cgpart.addparam(b'exp-wanted-sidedata', sidedata)

    if (
        kwargs.get('narrow', False)
        and kwargs.get('narrow_acl', False)
        and (include or exclude)
    ):
        # this is mandatory because otherwise ACL clients won't work
        specpart = bundler.newpart(b'Narrow:responsespec')
        specpart.data = b'%s\0%s' % (
            b'\n'.join(include),
            b'\n'.join(exclude),
        )
2447
2451
2448
2452
@getbundle2partsgenerator(b'bookmarks')
def _getbundlebookmarkpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get('bookmarks', False):
        return
    if not b2caps or b'bookmarks' not in b2caps:
        raise error.Abort(_(b'no common bookmarks exchange method'))
    # Only emit a part when there is at least one bookmark to send.
    encoded = bookmod.binaryencode(repo, bookmod.listbinbookmarks(repo))
    if encoded:
        bundler.newpart(b'bookmarks', data=encoded)
2462
2466
2463
2467
@getbundle2partsgenerator(b'listkeys')
def _getbundlelistkeysparts(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add parts containing listkeys namespaces to the requested bundle"""
    # One part per requested pushkey namespace.
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart(b'listkeys')
        part.addparam(b'namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
2475
2479
2476
2480
@getbundle2partsgenerator(b'obsmarkers')
def _getbundleobsmarkerpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Markers relevant to the ancestors of the requested heads.
    subset = [c.node() for c in repo.set(b'::%ln', heads)]
    markers = repo.obsstore.relevantmarkers(subset)
    bundle2.buildobsmarkerspart(bundler, obsutil.sortedmarkers(markers))
2489
2493
2490
2494
2491 @getbundle2partsgenerator(b'phases')
2495 @getbundle2partsgenerator(b'phases')
2492 def _getbundlephasespart(
2496 def _getbundlephasespart(
2493 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2497 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2494 ):
2498 ):
2495 """add phase heads part to the requested bundle"""
2499 """add phase heads part to the requested bundle"""
2496 if kwargs.get('phases', False):
2500 if kwargs.get('phases', False):
2497 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2501 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2498 raise error.Abort(_(b'no common phases exchange method'))
2502 raise error.Abort(_(b'no common phases exchange method'))
2499 if heads is None:
2503 if heads is None:
2500 heads = repo.heads()
2504 heads = repo.heads()
2501
2505
2502 headsbyphase = collections.defaultdict(set)
2506 headsbyphase = collections.defaultdict(set)
2503 if repo.publishing():
2507 if repo.publishing():
2504 headsbyphase[phases.public] = heads
2508 headsbyphase[phases.public] = heads
2505 else:
2509 else:
2506 # find the appropriate heads to move
2510 # find the appropriate heads to move
2507
2511
2508 phase = repo._phasecache.phase
2512 phase = repo._phasecache.phase
2509 node = repo.changelog.node
2513 node = repo.changelog.node
2510 rev = repo.changelog.rev
2514 rev = repo.changelog.rev
2511 for h in heads:
2515 for h in heads:
2512 headsbyphase[phase(repo, rev(h))].add(h)
2516 headsbyphase[phase(repo, rev(h))].add(h)
2513 seenphases = list(headsbyphase.keys())
2517 seenphases = list(headsbyphase.keys())
2514
2518
2515 # We do not handle anything but public and draft phase for now
2519 # We do not handle anything but public and draft phase for now
2516 if seenphases:
2520 if seenphases:
2517 assert max(seenphases) <= phases.draft
2521 assert max(seenphases) <= phases.draft
2518
2522
2519 # if client is pulling non-public changesets, we need to find
2523 # if client is pulling non-public changesets, we need to find
2520 # intermediate public heads.
2524 # intermediate public heads.
2521 draftheads = headsbyphase.get(phases.draft, set())
2525 draftheads = headsbyphase.get(phases.draft, set())
2522 if draftheads:
2526 if draftheads:
2523 publicheads = headsbyphase.get(phases.public, set())
2527 publicheads = headsbyphase.get(phases.public, set())
2524
2528
2525 revset = b'heads(only(%ln, %ln) and public())'
2529 revset = b'heads(only(%ln, %ln) and public())'
2526 extraheads = repo.revs(revset, draftheads, publicheads)
2530 extraheads = repo.revs(revset, draftheads, publicheads)
2527 for r in extraheads:
2531 for r in extraheads:
2528 headsbyphase[phases.public].add(node(r))
2532 headsbyphase[phases.public].add(node(r))
2529
2533
2530 # transform data in a format used by the encoding function
2534 # transform data in a format used by the encoding function
2531 phasemapping = {
2535 phasemapping = {
2532 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2536 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2533 }
2537 }
2534
2538
2535 # generate the actual part
2539 # generate the actual part
2536 phasedata = phases.binaryencode(phasemapping)
2540 phasedata = phases.binaryencode(phasemapping)
2537 bundler.newpart(b'phase-heads', data=phasedata)
2541 bundler.newpart(b'phase-heads', data=phasedata)
2538
2542
2539
2543
2540 @getbundle2partsgenerator(b'hgtagsfnodes')
2544 @getbundle2partsgenerator(b'hgtagsfnodes')
2541 def _getbundletagsfnodes(
2545 def _getbundletagsfnodes(
2542 bundler,
2546 bundler,
2543 repo,
2547 repo,
2544 source,
2548 source,
2545 bundlecaps=None,
2549 bundlecaps=None,
2546 b2caps=None,
2550 b2caps=None,
2547 heads=None,
2551 heads=None,
2548 common=None,
2552 common=None,
2549 **kwargs
2553 **kwargs
2550 ):
2554 ):
2551 """Transfer the .hgtags filenodes mapping.
2555 """Transfer the .hgtags filenodes mapping.
2552
2556
2553 Only values for heads in this bundle will be transferred.
2557 Only values for heads in this bundle will be transferred.
2554
2558
2555 The part data consists of pairs of 20 byte changeset node and .hgtags
2559 The part data consists of pairs of 20 byte changeset node and .hgtags
2556 filenodes raw values.
2560 filenodes raw values.
2557 """
2561 """
2558 # Don't send unless:
2562 # Don't send unless:
2559 # - changeset are being exchanged,
2563 # - changeset are being exchanged,
2560 # - the client supports it.
2564 # - the client supports it.
2561 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2565 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2562 return
2566 return
2563
2567
2564 outgoing = _computeoutgoing(repo, heads, common)
2568 outgoing = _computeoutgoing(repo, heads, common)
2565 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2569 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2566
2570
2567
2571
2568 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2572 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2569 def _getbundlerevbranchcache(
2573 def _getbundlerevbranchcache(
2570 bundler,
2574 bundler,
2571 repo,
2575 repo,
2572 source,
2576 source,
2573 bundlecaps=None,
2577 bundlecaps=None,
2574 b2caps=None,
2578 b2caps=None,
2575 heads=None,
2579 heads=None,
2576 common=None,
2580 common=None,
2577 **kwargs
2581 **kwargs
2578 ):
2582 ):
2579 """Transfer the rev-branch-cache mapping
2583 """Transfer the rev-branch-cache mapping
2580
2584
2581 The payload is a series of data related to each branch
2585 The payload is a series of data related to each branch
2582
2586
2583 1) branch name length
2587 1) branch name length
2584 2) number of open heads
2588 2) number of open heads
2585 3) number of closed heads
2589 3) number of closed heads
2586 4) open heads nodes
2590 4) open heads nodes
2587 5) closed heads nodes
2591 5) closed heads nodes
2588 """
2592 """
2589 # Don't send unless:
2593 # Don't send unless:
2590 # - changeset are being exchanged,
2594 # - changeset are being exchanged,
2591 # - the client supports it.
2595 # - the client supports it.
2592 # - narrow bundle isn't in play (not currently compatible).
2596 # - narrow bundle isn't in play (not currently compatible).
2593 if (
2597 if (
2594 not kwargs.get('cg', True)
2598 not kwargs.get('cg', True)
2595 or not b2caps
2599 or not b2caps
2596 or b'rev-branch-cache' not in b2caps
2600 or b'rev-branch-cache' not in b2caps
2597 or kwargs.get('narrow', False)
2601 or kwargs.get('narrow', False)
2598 or repo.ui.has_section(_NARROWACL_SECTION)
2602 or repo.ui.has_section(_NARROWACL_SECTION)
2599 ):
2603 ):
2600 return
2604 return
2601
2605
2602 outgoing = _computeoutgoing(repo, heads, common)
2606 outgoing = _computeoutgoing(repo, heads, common)
2603 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2607 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2604
2608
2605
2609
2606 def check_heads(repo, their_heads, context):
2610 def check_heads(repo, their_heads, context):
2607 """check if the heads of a repo have been modified
2611 """check if the heads of a repo have been modified
2608
2612
2609 Used by peer for unbundling.
2613 Used by peer for unbundling.
2610 """
2614 """
2611 heads = repo.heads()
2615 heads = repo.heads()
2612 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2616 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2613 if not (
2617 if not (
2614 their_heads == [b'force']
2618 their_heads == [b'force']
2615 or their_heads == heads
2619 or their_heads == heads
2616 or their_heads == [b'hashed', heads_hash]
2620 or their_heads == [b'hashed', heads_hash]
2617 ):
2621 ):
2618 # someone else committed/pushed/unbundled while we
2622 # someone else committed/pushed/unbundled while we
2619 # were transferring data
2623 # were transferring data
2620 raise error.PushRaced(
2624 raise error.PushRaced(
2621 b'repository changed while %s - please try again' % context
2625 b'repository changed while %s - please try again' % context
2622 )
2626 )
2623
2627
2624
2628
2625 def unbundle(repo, cg, heads, source, url):
2629 def unbundle(repo, cg, heads, source, url):
2626 """Apply a bundle to a repo.
2630 """Apply a bundle to a repo.
2627
2631
2628 this function makes sure the repo is locked during the application and has a
2632 this function makes sure the repo is locked during the application and has a
2629 mechanism to check that no push race occurred between the creation of the
2633 mechanism to check that no push race occurred between the creation of the
2630 bundle and its application.
2634 bundle and its application.
2631
2635
2632 If the push was raced a PushRaced exception is raised."""
2636 If the push was raced a PushRaced exception is raised."""
2633 r = 0
2637 r = 0
2634 # need a transaction when processing a bundle2 stream
2638 # need a transaction when processing a bundle2 stream
2635 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2639 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2636 lockandtr = [None, None, None]
2640 lockandtr = [None, None, None]
2637 recordout = None
2641 recordout = None
2638 # quick fix for output mismatch with bundle2 in 3.4
2642 # quick fix for output mismatch with bundle2 in 3.4
2639 captureoutput = repo.ui.configbool(
2643 captureoutput = repo.ui.configbool(
2640 b'experimental', b'bundle2-output-capture'
2644 b'experimental', b'bundle2-output-capture'
2641 )
2645 )
2642 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2646 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2643 captureoutput = True
2647 captureoutput = True
2644 try:
2648 try:
2645 # note: outside bundle1, 'heads' is expected to be empty and this
2649 # note: outside bundle1, 'heads' is expected to be empty and this
2646 # 'check_heads' call will be a no-op
2650 # 'check_heads' call will be a no-op
2647 check_heads(repo, heads, b'uploading changes')
2651 check_heads(repo, heads, b'uploading changes')
2648 # push can proceed
2652 # push can proceed
2649 if not isinstance(cg, bundle2.unbundle20):
2653 if not isinstance(cg, bundle2.unbundle20):
2650 # legacy case: bundle1 (changegroup 01)
2654 # legacy case: bundle1 (changegroup 01)
2651 txnname = b"\n".join([source, urlutil.hidepassword(url)])
2655 txnname = b"\n".join([source, urlutil.hidepassword(url)])
2652 with repo.lock(), repo.transaction(txnname) as tr:
2656 with repo.lock(), repo.transaction(txnname) as tr:
2653 op = bundle2.applybundle(repo, cg, tr, source, url)
2657 op = bundle2.applybundle(repo, cg, tr, source, url)
2654 r = bundle2.combinechangegroupresults(op)
2658 r = bundle2.combinechangegroupresults(op)
2655 else:
2659 else:
2656 r = None
2660 r = None
2657 try:
2661 try:
2658
2662
2659 def gettransaction():
2663 def gettransaction():
2660 if not lockandtr[2]:
2664 if not lockandtr[2]:
2661 if not bookmod.bookmarksinstore(repo):
2665 if not bookmod.bookmarksinstore(repo):
2662 lockandtr[0] = repo.wlock()
2666 lockandtr[0] = repo.wlock()
2663 lockandtr[1] = repo.lock()
2667 lockandtr[1] = repo.lock()
2664 lockandtr[2] = repo.transaction(source)
2668 lockandtr[2] = repo.transaction(source)
2665 lockandtr[2].hookargs[b'source'] = source
2669 lockandtr[2].hookargs[b'source'] = source
2666 lockandtr[2].hookargs[b'url'] = url
2670 lockandtr[2].hookargs[b'url'] = url
2667 lockandtr[2].hookargs[b'bundle2'] = b'1'
2671 lockandtr[2].hookargs[b'bundle2'] = b'1'
2668 return lockandtr[2]
2672 return lockandtr[2]
2669
2673
2670 # Do greedy locking by default until we're satisfied with lazy
2674 # Do greedy locking by default until we're satisfied with lazy
2671 # locking.
2675 # locking.
2672 if not repo.ui.configbool(
2676 if not repo.ui.configbool(
2673 b'experimental', b'bundle2lazylocking'
2677 b'experimental', b'bundle2lazylocking'
2674 ):
2678 ):
2675 gettransaction()
2679 gettransaction()
2676
2680
2677 op = bundle2.bundleoperation(
2681 op = bundle2.bundleoperation(
2678 repo,
2682 repo,
2679 gettransaction,
2683 gettransaction,
2680 captureoutput=captureoutput,
2684 captureoutput=captureoutput,
2681 source=b'push',
2685 source=b'push',
2682 )
2686 )
2683 try:
2687 try:
2684 op = bundle2.processbundle(repo, cg, op=op)
2688 op = bundle2.processbundle(repo, cg, op=op)
2685 finally:
2689 finally:
2686 r = op.reply
2690 r = op.reply
2687 if captureoutput and r is not None:
2691 if captureoutput and r is not None:
2688 repo.ui.pushbuffer(error=True, subproc=True)
2692 repo.ui.pushbuffer(error=True, subproc=True)
2689
2693
2690 def recordout(output):
2694 def recordout(output):
2691 r.newpart(b'output', data=output, mandatory=False)
2695 r.newpart(b'output', data=output, mandatory=False)
2692
2696
2693 if lockandtr[2] is not None:
2697 if lockandtr[2] is not None:
2694 lockandtr[2].close()
2698 lockandtr[2].close()
2695 except BaseException as exc:
2699 except BaseException as exc:
2696 exc.duringunbundle2 = True
2700 exc.duringunbundle2 = True
2697 if captureoutput and r is not None:
2701 if captureoutput and r is not None:
2698 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2702 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2699
2703
2700 def recordout(output):
2704 def recordout(output):
2701 part = bundle2.bundlepart(
2705 part = bundle2.bundlepart(
2702 b'output', data=output, mandatory=False
2706 b'output', data=output, mandatory=False
2703 )
2707 )
2704 parts.append(part)
2708 parts.append(part)
2705
2709
2706 raise
2710 raise
2707 finally:
2711 finally:
2708 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2712 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2709 if recordout is not None:
2713 if recordout is not None:
2710 recordout(repo.ui.popbuffer())
2714 recordout(repo.ui.popbuffer())
2711 return r
2715 return r
2712
2716
2713
2717
2714 def _maybeapplyclonebundle(pullop):
2718 def _maybeapplyclonebundle(pullop):
2715 """Apply a clone bundle from a remote, if possible."""
2719 """Apply a clone bundle from a remote, if possible."""
2716
2720
2717 repo = pullop.repo
2721 repo = pullop.repo
2718 remote = pullop.remote
2722 remote = pullop.remote
2719
2723
2720 if not repo.ui.configbool(b'ui', b'clonebundles'):
2724 if not repo.ui.configbool(b'ui', b'clonebundles'):
2721 return
2725 return
2722
2726
2723 # Only run if local repo is empty.
2727 # Only run if local repo is empty.
2724 if len(repo):
2728 if len(repo):
2725 return
2729 return
2726
2730
2727 if pullop.heads:
2731 if pullop.heads:
2728 return
2732 return
2729
2733
2730 if not remote.capable(b'clonebundles'):
2734 if not remote.capable(b'clonebundles'):
2731 return
2735 return
2732
2736
2733 with remote.commandexecutor() as e:
2737 with remote.commandexecutor() as e:
2734 res = e.callcommand(b'clonebundles', {}).result()
2738 res = e.callcommand(b'clonebundles', {}).result()
2735
2739
2736 # If we call the wire protocol command, that's good enough to record the
2740 # If we call the wire protocol command, that's good enough to record the
2737 # attempt.
2741 # attempt.
2738 pullop.clonebundleattempted = True
2742 pullop.clonebundleattempted = True
2739
2743
2740 entries = bundlecaches.parseclonebundlesmanifest(repo, res)
2744 entries = bundlecaches.parseclonebundlesmanifest(repo, res)
2741 if not entries:
2745 if not entries:
2742 repo.ui.note(
2746 repo.ui.note(
2743 _(
2747 _(
2744 b'no clone bundles available on remote; '
2748 b'no clone bundles available on remote; '
2745 b'falling back to regular clone\n'
2749 b'falling back to regular clone\n'
2746 )
2750 )
2747 )
2751 )
2748 return
2752 return
2749
2753
2750 entries = bundlecaches.filterclonebundleentries(
2754 entries = bundlecaches.filterclonebundleentries(
2751 repo, entries, streamclonerequested=pullop.streamclonerequested
2755 repo, entries, streamclonerequested=pullop.streamclonerequested
2752 )
2756 )
2753
2757
2754 if not entries:
2758 if not entries:
2755 # There is a thundering herd concern here. However, if a server
2759 # There is a thundering herd concern here. However, if a server
2756 # operator doesn't advertise bundles appropriate for its clients,
2760 # operator doesn't advertise bundles appropriate for its clients,
2757 # they deserve what's coming. Furthermore, from a client's
2761 # they deserve what's coming. Furthermore, from a client's
2758 # perspective, no automatic fallback would mean not being able to
2762 # perspective, no automatic fallback would mean not being able to
2759 # clone!
2763 # clone!
2760 repo.ui.warn(
2764 repo.ui.warn(
2761 _(
2765 _(
2762 b'no compatible clone bundles available on server; '
2766 b'no compatible clone bundles available on server; '
2763 b'falling back to regular clone\n'
2767 b'falling back to regular clone\n'
2764 )
2768 )
2765 )
2769 )
2766 repo.ui.warn(
2770 repo.ui.warn(
2767 _(b'(you may want to report this to the server operator)\n')
2771 _(b'(you may want to report this to the server operator)\n')
2768 )
2772 )
2769 return
2773 return
2770
2774
2771 entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
2775 entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
2772
2776
2773 url = entries[0][b'URL']
2777 url = entries[0][b'URL']
2774 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2778 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2775 if trypullbundlefromurl(repo.ui, repo, url):
2779 if trypullbundlefromurl(repo.ui, repo, url):
2776 repo.ui.status(_(b'finished applying clone bundle\n'))
2780 repo.ui.status(_(b'finished applying clone bundle\n'))
2777 # Bundle failed.
2781 # Bundle failed.
2778 #
2782 #
2779 # We abort by default to avoid the thundering herd of
2783 # We abort by default to avoid the thundering herd of
2780 # clients flooding a server that was expecting expensive
2784 # clients flooding a server that was expecting expensive
2781 # clone load to be offloaded.
2785 # clone load to be offloaded.
2782 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2786 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2783 repo.ui.warn(_(b'falling back to normal clone\n'))
2787 repo.ui.warn(_(b'falling back to normal clone\n'))
2784 else:
2788 else:
2785 raise error.Abort(
2789 raise error.Abort(
2786 _(b'error applying bundle'),
2790 _(b'error applying bundle'),
2787 hint=_(
2791 hint=_(
2788 b'if this error persists, consider contacting '
2792 b'if this error persists, consider contacting '
2789 b'the server operator or disable clone '
2793 b'the server operator or disable clone '
2790 b'bundles via '
2794 b'bundles via '
2791 b'"--config ui.clonebundles=false"'
2795 b'"--config ui.clonebundles=false"'
2792 ),
2796 ),
2793 )
2797 )
2794
2798
2795
2799
2796 def trypullbundlefromurl(ui, repo, url):
2800 def trypullbundlefromurl(ui, repo, url):
2797 """Attempt to apply a bundle from a URL."""
2801 """Attempt to apply a bundle from a URL."""
2798 with repo.lock(), repo.transaction(b'bundleurl') as tr:
2802 with repo.lock(), repo.transaction(b'bundleurl') as tr:
2799 try:
2803 try:
2800 fh = urlmod.open(ui, url)
2804 fh = urlmod.open(ui, url)
2801 cg = readbundle(ui, fh, b'stream')
2805 cg = readbundle(ui, fh, b'stream')
2802
2806
2803 if isinstance(cg, streamclone.streamcloneapplier):
2807 if isinstance(cg, streamclone.streamcloneapplier):
2804 cg.apply(repo)
2808 cg.apply(repo)
2805 else:
2809 else:
2806 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
2810 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
2807 return True
2811 return True
2808 except urlerr.httperror as e:
2812 except urlerr.httperror as e:
2809 ui.warn(
2813 ui.warn(
2810 _(b'HTTP error fetching bundle: %s\n')
2814 _(b'HTTP error fetching bundle: %s\n')
2811 % stringutil.forcebytestr(e)
2815 % stringutil.forcebytestr(e)
2812 )
2816 )
2813 except urlerr.urlerror as e:
2817 except urlerr.urlerror as e:
2814 ui.warn(
2818 ui.warn(
2815 _(b'error fetching bundle: %s\n')
2819 _(b'error fetching bundle: %s\n')
2816 % stringutil.forcebytestr(e.reason)
2820 % stringutil.forcebytestr(e.reason)
2817 )
2821 )
2818
2822
2819 return False
2823 return False
@@ -1,1825 +1,1845 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [extensions]
2 > [extensions]
3 > drawdag=$TESTDIR/drawdag.py
3 > drawdag=$TESTDIR/drawdag.py
4 > phasereport=$TESTDIR/testlib/ext-phase-report.py
4 > phasereport=$TESTDIR/testlib/ext-phase-report.py
5 > EOF
5 > EOF
6
6
7 $ hgph() { hg log -G --template "{rev} {phase} {desc} - {node|short}\n" $*; }
7 $ hgph() { hg log -G --template "{rev} {phase} {desc} - {node|short}\n" $*; }
8
8
9 $ mkcommit() {
9 $ mkcommit() {
10 > echo "$1" > "$1"
10 > echo "$1" > "$1"
11 > hg add "$1"
11 > hg add "$1"
12 > message="$1"
12 > message="$1"
13 > shift
13 > shift
14 > hg ci -m "$message" $*
14 > hg ci -m "$message" $*
15 > }
15 > }
16
16
17 $ hg init alpha
17 $ hg init alpha
18 $ cd alpha
18 $ cd alpha
19 $ mkcommit a-A
19 $ mkcommit a-A
20 test-debug-phase: new rev 0: x -> 1
20 test-debug-phase: new rev 0: x -> 1
21 $ mkcommit a-B
21 $ mkcommit a-B
22 test-debug-phase: new rev 1: x -> 1
22 test-debug-phase: new rev 1: x -> 1
23 $ mkcommit a-C
23 $ mkcommit a-C
24 test-debug-phase: new rev 2: x -> 1
24 test-debug-phase: new rev 2: x -> 1
25 $ mkcommit a-D
25 $ mkcommit a-D
26 test-debug-phase: new rev 3: x -> 1
26 test-debug-phase: new rev 3: x -> 1
27 $ hgph
27 $ hgph
28 @ 3 draft a-D - b555f63b6063
28 @ 3 draft a-D - b555f63b6063
29 |
29 |
30 o 2 draft a-C - 54acac6f23ab
30 o 2 draft a-C - 54acac6f23ab
31 |
31 |
32 o 1 draft a-B - 548a3d25dbf0
32 o 1 draft a-B - 548a3d25dbf0
33 |
33 |
34 o 0 draft a-A - 054250a37db4
34 o 0 draft a-A - 054250a37db4
35
35
36
36
37 $ hg init ../beta
37 $ hg init ../beta
38 $ hg push -r 1 ../beta
38 $ hg push -r 1 ../beta
39 pushing to ../beta
39 pushing to ../beta
40 searching for changes
40 searching for changes
41 adding changesets
41 adding changesets
42 adding manifests
42 adding manifests
43 adding file changes
43 adding file changes
44 added 2 changesets with 2 changes to 2 files
44 added 2 changesets with 2 changes to 2 files
45 test-debug-phase: new rev 0: x -> 0
45 test-debug-phase: new rev 0: x -> 0
46 test-debug-phase: new rev 1: x -> 0
46 test-debug-phase: new rev 1: x -> 0
47 test-debug-phase: move rev 0: 1 -> 0
47 test-debug-phase: move rev 0: 1 -> 0
48 test-debug-phase: move rev 1: 1 -> 0
48 test-debug-phase: move rev 1: 1 -> 0
49 $ hgph
49 $ hgph
50 @ 3 draft a-D - b555f63b6063
50 @ 3 draft a-D - b555f63b6063
51 |
51 |
52 o 2 draft a-C - 54acac6f23ab
52 o 2 draft a-C - 54acac6f23ab
53 |
53 |
54 o 1 public a-B - 548a3d25dbf0
54 o 1 public a-B - 548a3d25dbf0
55 |
55 |
56 o 0 public a-A - 054250a37db4
56 o 0 public a-A - 054250a37db4
57
57
58
58
59 $ cd ../beta
59 $ cd ../beta
60 $ hgph
60 $ hgph
61 o 1 public a-B - 548a3d25dbf0
61 o 1 public a-B - 548a3d25dbf0
62 |
62 |
63 o 0 public a-A - 054250a37db4
63 o 0 public a-A - 054250a37db4
64
64
65 $ hg up -q
65 $ hg up -q
66 $ mkcommit b-A
66 $ mkcommit b-A
67 test-debug-phase: new rev 2: x -> 1
67 test-debug-phase: new rev 2: x -> 1
68 $ hgph
68 $ hgph
69 @ 2 draft b-A - f54f1bb90ff3
69 @ 2 draft b-A - f54f1bb90ff3
70 |
70 |
71 o 1 public a-B - 548a3d25dbf0
71 o 1 public a-B - 548a3d25dbf0
72 |
72 |
73 o 0 public a-A - 054250a37db4
73 o 0 public a-A - 054250a37db4
74
74
75 $ hg pull ../alpha
75 $ hg pull ../alpha
76 pulling from ../alpha
76 pulling from ../alpha
77 searching for changes
77 searching for changes
78 adding changesets
78 adding changesets
79 adding manifests
79 adding manifests
80 adding file changes
80 adding file changes
81 added 2 changesets with 2 changes to 2 files (+1 heads)
81 added 2 changesets with 2 changes to 2 files (+1 heads)
82 new changesets 54acac6f23ab:b555f63b6063
82 new changesets 54acac6f23ab:b555f63b6063
83 test-debug-phase: new rev 3: x -> 0
83 test-debug-phase: new rev 3: x -> 0
84 test-debug-phase: new rev 4: x -> 0
84 test-debug-phase: new rev 4: x -> 0
85 (run 'hg heads' to see heads, 'hg merge' to merge)
85 (run 'hg heads' to see heads, 'hg merge' to merge)
86 $ hgph
86 $ hgph
87 o 4 public a-D - b555f63b6063
87 o 4 public a-D - b555f63b6063
88 |
88 |
89 o 3 public a-C - 54acac6f23ab
89 o 3 public a-C - 54acac6f23ab
90 |
90 |
91 | @ 2 draft b-A - f54f1bb90ff3
91 | @ 2 draft b-A - f54f1bb90ff3
92 |/
92 |/
93 o 1 public a-B - 548a3d25dbf0
93 o 1 public a-B - 548a3d25dbf0
94 |
94 |
95 o 0 public a-A - 054250a37db4
95 o 0 public a-A - 054250a37db4
96
96
97
97
98 pull did not update ../alpha state.
98 pull did not update ../alpha state.
99 push from alpha to beta should update phase even if nothing is transferred
99 push from alpha to beta should update phase even if nothing is transferred
100
100
101 $ cd ../alpha
101 $ cd ../alpha
102 $ hgph # not updated by remote pull
102 $ hgph # not updated by remote pull
103 @ 3 draft a-D - b555f63b6063
103 @ 3 draft a-D - b555f63b6063
104 |
104 |
105 o 2 draft a-C - 54acac6f23ab
105 o 2 draft a-C - 54acac6f23ab
106 |
106 |
107 o 1 public a-B - 548a3d25dbf0
107 o 1 public a-B - 548a3d25dbf0
108 |
108 |
109 o 0 public a-A - 054250a37db4
109 o 0 public a-A - 054250a37db4
110
110
111 $ hg push -r 2 ../beta
111 $ hg push -r 2 ../beta
112 pushing to ../beta
112 pushing to ../beta
113 searching for changes
113 searching for changes
114 no changes found
114 no changes found
115 test-debug-phase: move rev 2: 1 -> 0
115 test-debug-phase: move rev 2: 1 -> 0
116 [1]
116 [1]
117 $ hgph
117 $ hgph
118 @ 3 draft a-D - b555f63b6063
118 @ 3 draft a-D - b555f63b6063
119 |
119 |
120 o 2 public a-C - 54acac6f23ab
120 o 2 public a-C - 54acac6f23ab
121 |
121 |
122 o 1 public a-B - 548a3d25dbf0
122 o 1 public a-B - 548a3d25dbf0
123 |
123 |
124 o 0 public a-A - 054250a37db4
124 o 0 public a-A - 054250a37db4
125
125
126 $ hg push ../beta
126 $ hg push ../beta
127 pushing to ../beta
127 pushing to ../beta
128 searching for changes
128 searching for changes
129 no changes found
129 no changes found
130 test-debug-phase: move rev 3: 1 -> 0
130 test-debug-phase: move rev 3: 1 -> 0
131 [1]
131 [1]
132 $ hgph
132 $ hgph
133 @ 3 public a-D - b555f63b6063
133 @ 3 public a-D - b555f63b6063
134 |
134 |
135 o 2 public a-C - 54acac6f23ab
135 o 2 public a-C - 54acac6f23ab
136 |
136 |
137 o 1 public a-B - 548a3d25dbf0
137 o 1 public a-B - 548a3d25dbf0
138 |
138 |
139 o 0 public a-A - 054250a37db4
139 o 0 public a-A - 054250a37db4
140
140
141
141
142 update must update phase of common changeset too
142 update must update phase of common changeset too
143
143
144 $ hg pull ../beta # getting b-A
144 $ hg pull ../beta # getting b-A
145 pulling from ../beta
145 pulling from ../beta
146 searching for changes
146 searching for changes
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 1 changesets with 1 changes to 1 files (+1 heads)
150 added 1 changesets with 1 changes to 1 files (+1 heads)
151 new changesets f54f1bb90ff3
151 new changesets f54f1bb90ff3
152 test-debug-phase: new rev 4: x -> 0
152 test-debug-phase: new rev 4: x -> 0
153 (run 'hg heads' to see heads, 'hg merge' to merge)
153 (run 'hg heads' to see heads, 'hg merge' to merge)
154
154
155 $ cd ../beta
155 $ cd ../beta
156 $ hgph # not updated by remote pull
156 $ hgph # not updated by remote pull
157 o 4 public a-D - b555f63b6063
157 o 4 public a-D - b555f63b6063
158 |
158 |
159 o 3 public a-C - 54acac6f23ab
159 o 3 public a-C - 54acac6f23ab
160 |
160 |
161 | @ 2 draft b-A - f54f1bb90ff3
161 | @ 2 draft b-A - f54f1bb90ff3
162 |/
162 |/
163 o 1 public a-B - 548a3d25dbf0
163 o 1 public a-B - 548a3d25dbf0
164 |
164 |
165 o 0 public a-A - 054250a37db4
165 o 0 public a-A - 054250a37db4
166
166
167 $ hg pull ../alpha
167 $ hg pull ../alpha
168 pulling from ../alpha
168 pulling from ../alpha
169 searching for changes
169 searching for changes
170 no changes found
170 no changes found
171 1 local changesets published
171 1 local changesets published
172 test-debug-phase: move rev 2: 1 -> 0
172 test-debug-phase: move rev 2: 1 -> 0
173 $ hgph
173 $ hgph
174 o 4 public a-D - b555f63b6063
174 o 4 public a-D - b555f63b6063
175 |
175 |
176 o 3 public a-C - 54acac6f23ab
176 o 3 public a-C - 54acac6f23ab
177 |
177 |
178 | @ 2 public b-A - f54f1bb90ff3
178 | @ 2 public b-A - f54f1bb90ff3
179 |/
179 |/
180 o 1 public a-B - 548a3d25dbf0
180 o 1 public a-B - 548a3d25dbf0
181 |
181 |
182 o 0 public a-A - 054250a37db4
182 o 0 public a-A - 054250a37db4
183
183
184
184
185 Publish configuration option
185 Publish configuration option
186 ----------------------------
186 ----------------------------
187
187
188 Pull
188 Pull
189 ````
189 ````
190
190
191 changegroup are added without phase movement
191 changegroup are added without phase movement
192
192
193 $ hg bundle -a ../base.bundle
193 $ hg bundle -a ../base.bundle
194 5 changesets found
194 5 changesets found
195 $ cd ..
195 $ cd ..
196 $ hg init mu
196 $ hg init mu
197 $ cd mu
197 $ cd mu
198 $ cat > .hg/hgrc << EOF
198 $ cat > .hg/hgrc << EOF
199 > [phases]
199 > [phases]
200 > publish=0
200 > publish=0
201 > EOF
201 > EOF
202 $ hg unbundle ../base.bundle
202 $ hg unbundle ../base.bundle
203 adding changesets
203 adding changesets
204 adding manifests
204 adding manifests
205 adding file changes
205 adding file changes
206 added 5 changesets with 5 changes to 5 files (+1 heads)
206 added 5 changesets with 5 changes to 5 files (+1 heads)
207 new changesets 054250a37db4:b555f63b6063 (5 drafts)
207 new changesets 054250a37db4:b555f63b6063 (5 drafts)
208 test-debug-phase: new rev 0: x -> 1
208 test-debug-phase: new rev 0: x -> 1
209 test-debug-phase: new rev 1: x -> 1
209 test-debug-phase: new rev 1: x -> 1
210 test-debug-phase: new rev 2: x -> 1
210 test-debug-phase: new rev 2: x -> 1
211 test-debug-phase: new rev 3: x -> 1
211 test-debug-phase: new rev 3: x -> 1
212 test-debug-phase: new rev 4: x -> 1
212 test-debug-phase: new rev 4: x -> 1
213 (run 'hg heads' to see heads, 'hg merge' to merge)
213 (run 'hg heads' to see heads, 'hg merge' to merge)
214 $ hgph
214 $ hgph
215 o 4 draft a-D - b555f63b6063
215 o 4 draft a-D - b555f63b6063
216 |
216 |
217 o 3 draft a-C - 54acac6f23ab
217 o 3 draft a-C - 54acac6f23ab
218 |
218 |
219 | o 2 draft b-A - f54f1bb90ff3
219 | o 2 draft b-A - f54f1bb90ff3
220 |/
220 |/
221 o 1 draft a-B - 548a3d25dbf0
221 o 1 draft a-B - 548a3d25dbf0
222 |
222 |
223 o 0 draft a-A - 054250a37db4
223 o 0 draft a-A - 054250a37db4
224
224
225 $ cd ..
225 $ cd ..
226
226
227 Pulling from publish=False to publish=False does not move boundary.
227 Pulling from publish=False to publish=False does not move boundary.
228
228
229 $ hg init nu
229 $ hg init nu
230 $ cd nu
230 $ cd nu
231 $ cat > .hg/hgrc << EOF
231 $ cat > .hg/hgrc << EOF
232 > [phases]
232 > [phases]
233 > publish=0
233 > publish=0
234 > EOF
234 > EOF
235 $ hg pull ../mu -r 54acac6f23ab
235 $ hg pull ../mu -r 54acac6f23ab
236 pulling from ../mu
236 pulling from ../mu
237 adding changesets
237 adding changesets
238 adding manifests
238 adding manifests
239 adding file changes
239 adding file changes
240 added 3 changesets with 3 changes to 3 files
240 added 3 changesets with 3 changes to 3 files
241 new changesets 054250a37db4:54acac6f23ab (3 drafts)
241 new changesets 054250a37db4:54acac6f23ab (3 drafts)
242 test-debug-phase: new rev 0: x -> 1
242 test-debug-phase: new rev 0: x -> 1
243 test-debug-phase: new rev 1: x -> 1
243 test-debug-phase: new rev 1: x -> 1
244 test-debug-phase: new rev 2: x -> 1
244 test-debug-phase: new rev 2: x -> 1
245 (run 'hg update' to get a working copy)
245 (run 'hg update' to get a working copy)
246 $ hgph
246 $ hgph
247 o 2 draft a-C - 54acac6f23ab
247 o 2 draft a-C - 54acac6f23ab
248 |
248 |
249 o 1 draft a-B - 548a3d25dbf0
249 o 1 draft a-B - 548a3d25dbf0
250 |
250 |
251 o 0 draft a-A - 054250a37db4
251 o 0 draft a-A - 054250a37db4
252
252
253
253
254 Even for common
254 Even for common
255
255
256 $ hg pull ../mu -r f54f1bb90ff3
256 $ hg pull ../mu -r f54f1bb90ff3
257 pulling from ../mu
257 pulling from ../mu
258 searching for changes
258 searching for changes
259 adding changesets
259 adding changesets
260 adding manifests
260 adding manifests
261 adding file changes
261 adding file changes
262 added 1 changesets with 1 changes to 1 files (+1 heads)
262 added 1 changesets with 1 changes to 1 files (+1 heads)
263 new changesets f54f1bb90ff3 (1 drafts)
263 new changesets f54f1bb90ff3 (1 drafts)
264 test-debug-phase: new rev 3: x -> 1
264 test-debug-phase: new rev 3: x -> 1
265 (run 'hg heads' to see heads, 'hg merge' to merge)
265 (run 'hg heads' to see heads, 'hg merge' to merge)
266 $ hgph
266 $ hgph
267 o 3 draft b-A - f54f1bb90ff3
267 o 3 draft b-A - f54f1bb90ff3
268 |
268 |
269 | o 2 draft a-C - 54acac6f23ab
269 | o 2 draft a-C - 54acac6f23ab
270 |/
270 |/
271 o 1 draft a-B - 548a3d25dbf0
271 o 1 draft a-B - 548a3d25dbf0
272 |
272 |
273 o 0 draft a-A - 054250a37db4
273 o 0 draft a-A - 054250a37db4
274
274
275
275
276
276
277 Pulling from Publish=True to Publish=False move boundary in common set.
277 Pulling from Publish=True to Publish=False move boundary in common set.
278 we are in nu
278 we are in nu
279
279
280 $ hg pull ../alpha -r b555f63b6063
280 $ hg pull ../alpha -r b555f63b6063
281 pulling from ../alpha
281 pulling from ../alpha
282 searching for changes
282 searching for changes
283 adding changesets
283 adding changesets
284 adding manifests
284 adding manifests
285 adding file changes
285 adding file changes
286 added 1 changesets with 1 changes to 1 files
286 added 1 changesets with 1 changes to 1 files
287 new changesets b555f63b6063
287 new changesets b555f63b6063
288 3 local changesets published
288 3 local changesets published
289 test-debug-phase: move rev 0: 1 -> 0
289 test-debug-phase: move rev 0: 1 -> 0
290 test-debug-phase: move rev 1: 1 -> 0
290 test-debug-phase: move rev 1: 1 -> 0
291 test-debug-phase: move rev 2: 1 -> 0
291 test-debug-phase: move rev 2: 1 -> 0
292 test-debug-phase: new rev 4: x -> 0
292 test-debug-phase: new rev 4: x -> 0
293 (run 'hg update' to get a working copy)
293 (run 'hg update' to get a working copy)
294 $ hgph # f54f1bb90ff3 stay draft, not ancestor of -r
294 $ hgph # f54f1bb90ff3 stay draft, not ancestor of -r
295 o 4 public a-D - b555f63b6063
295 o 4 public a-D - b555f63b6063
296 |
296 |
297 | o 3 draft b-A - f54f1bb90ff3
297 | o 3 draft b-A - f54f1bb90ff3
298 | |
298 | |
299 o | 2 public a-C - 54acac6f23ab
299 o | 2 public a-C - 54acac6f23ab
300 |/
300 |/
301 o 1 public a-B - 548a3d25dbf0
301 o 1 public a-B - 548a3d25dbf0
302 |
302 |
303 o 0 public a-A - 054250a37db4
303 o 0 public a-A - 054250a37db4
304
304
305
305
306 pulling from Publish=False to publish=False with some public
306 pulling from Publish=False to publish=False with some public
307
307
308 $ hg up -q f54f1bb90ff3
308 $ hg up -q f54f1bb90ff3
309 $ mkcommit n-A
309 $ mkcommit n-A
310 test-debug-phase: new rev 5: x -> 1
310 test-debug-phase: new rev 5: x -> 1
311 $ mkcommit n-B
311 $ mkcommit n-B
312 test-debug-phase: new rev 6: x -> 1
312 test-debug-phase: new rev 6: x -> 1
313 $ hgph
313 $ hgph
314 @ 6 draft n-B - 145e75495359
314 @ 6 draft n-B - 145e75495359
315 |
315 |
316 o 5 draft n-A - d6bcb4f74035
316 o 5 draft n-A - d6bcb4f74035
317 |
317 |
318 | o 4 public a-D - b555f63b6063
318 | o 4 public a-D - b555f63b6063
319 | |
319 | |
320 o | 3 draft b-A - f54f1bb90ff3
320 o | 3 draft b-A - f54f1bb90ff3
321 | |
321 | |
322 | o 2 public a-C - 54acac6f23ab
322 | o 2 public a-C - 54acac6f23ab
323 |/
323 |/
324 o 1 public a-B - 548a3d25dbf0
324 o 1 public a-B - 548a3d25dbf0
325 |
325 |
326 o 0 public a-A - 054250a37db4
326 o 0 public a-A - 054250a37db4
327
327
328 $ cd ../mu
328 $ cd ../mu
329 $ hg pull ../nu --confirm --config ui.interactive=True<<EOF
329 $ hg pull ../nu --confirm --config ui.interactive=True<<EOF
330 > y
330 > y
331 > EOF
331 > EOF
332 pulling from ../nu
332 pulling from ../nu
333 searching for changes
333 searching for changes
334 adding changesets
334 adding changesets
335 adding manifests
335 adding manifests
336 adding file changes
336 adding file changes
337 adding 2 changesets with 2 changes to 2 files
337 adding 2 changesets with 2 changes to 2 files
338 new changesets d6bcb4f74035:145e75495359 (2 drafts)
338 new changesets d6bcb4f74035:145e75495359 (2 drafts)
339 4 local changesets will be published
339 4 local changesets will be published
340 accept incoming changes (yn)? y
340 accept incoming changes (yn)? y
341 added 2 changesets with 2 changes to 2 files
341 added 2 changesets with 2 changes to 2 files
342 new changesets d6bcb4f74035:145e75495359 (2 drafts)
342 new changesets d6bcb4f74035:145e75495359 (2 drafts)
343 4 local changesets published
343 4 local changesets published
344 test-debug-phase: move rev 0: 1 -> 0
344 test-debug-phase: move rev 0: 1 -> 0
345 test-debug-phase: move rev 1: 1 -> 0
345 test-debug-phase: move rev 1: 1 -> 0
346 test-debug-phase: move rev 3: 1 -> 0
346 test-debug-phase: move rev 3: 1 -> 0
347 test-debug-phase: move rev 4: 1 -> 0
347 test-debug-phase: move rev 4: 1 -> 0
348 test-debug-phase: new rev 5: x -> 1
348 test-debug-phase: new rev 5: x -> 1
349 test-debug-phase: new rev 6: x -> 1
349 test-debug-phase: new rev 6: x -> 1
350 (run 'hg update' to get a working copy)
350 (run 'hg update' to get a working copy)
351 $ hgph
351 $ hgph
352 o 6 draft n-B - 145e75495359
352 o 6 draft n-B - 145e75495359
353 |
353 |
354 o 5 draft n-A - d6bcb4f74035
354 o 5 draft n-A - d6bcb4f74035
355 |
355 |
356 | o 4 public a-D - b555f63b6063
356 | o 4 public a-D - b555f63b6063
357 | |
357 | |
358 | o 3 public a-C - 54acac6f23ab
358 | o 3 public a-C - 54acac6f23ab
359 | |
359 | |
360 o | 2 draft b-A - f54f1bb90ff3
360 o | 2 draft b-A - f54f1bb90ff3
361 |/
361 |/
362 o 1 public a-B - 548a3d25dbf0
362 o 1 public a-B - 548a3d25dbf0
363 |
363 |
364 o 0 public a-A - 054250a37db4
364 o 0 public a-A - 054250a37db4
365
365
366 $ cd ..
366 $ cd ..
367
367
368 pulling into publish=True
368 pulling into publish=True
369
369
370 $ cd alpha
370 $ cd alpha
371 $ hgph
371 $ hgph
372 o 4 public b-A - f54f1bb90ff3
372 o 4 public b-A - f54f1bb90ff3
373 |
373 |
374 | @ 3 public a-D - b555f63b6063
374 | @ 3 public a-D - b555f63b6063
375 | |
375 | |
376 | o 2 public a-C - 54acac6f23ab
376 | o 2 public a-C - 54acac6f23ab
377 |/
377 |/
378 o 1 public a-B - 548a3d25dbf0
378 o 1 public a-B - 548a3d25dbf0
379 |
379 |
380 o 0 public a-A - 054250a37db4
380 o 0 public a-A - 054250a37db4
381
381
382 $ hg pull ../mu
382 $ hg pull ../mu
383 pulling from ../mu
383 pulling from ../mu
384 searching for changes
384 searching for changes
385 adding changesets
385 adding changesets
386 adding manifests
386 adding manifests
387 adding file changes
387 adding file changes
388 added 2 changesets with 2 changes to 2 files
388 added 2 changesets with 2 changes to 2 files
389 new changesets d6bcb4f74035:145e75495359 (2 drafts)
389 new changesets d6bcb4f74035:145e75495359 (2 drafts)
390 test-debug-phase: new rev 5: x -> 1
390 test-debug-phase: new rev 5: x -> 1
391 test-debug-phase: new rev 6: x -> 1
391 test-debug-phase: new rev 6: x -> 1
392 (run 'hg update' to get a working copy)
392 (run 'hg update' to get a working copy)
393 $ hgph
393 $ hgph
394 o 6 draft n-B - 145e75495359
394 o 6 draft n-B - 145e75495359
395 |
395 |
396 o 5 draft n-A - d6bcb4f74035
396 o 5 draft n-A - d6bcb4f74035
397 |
397 |
398 o 4 public b-A - f54f1bb90ff3
398 o 4 public b-A - f54f1bb90ff3
399 |
399 |
400 | @ 3 public a-D - b555f63b6063
400 | @ 3 public a-D - b555f63b6063
401 | |
401 | |
402 | o 2 public a-C - 54acac6f23ab
402 | o 2 public a-C - 54acac6f23ab
403 |/
403 |/
404 o 1 public a-B - 548a3d25dbf0
404 o 1 public a-B - 548a3d25dbf0
405 |
405 |
406 o 0 public a-A - 054250a37db4
406 o 0 public a-A - 054250a37db4
407
407
408 $ cd ..
408 $ cd ..
409
409
410 pulling back into original repo
410 pulling back into original repo
411
411
412 $ cd nu
412 $ cd nu
413 $ hg pull ../alpha
413 $ hg pull ../alpha
414 pulling from ../alpha
414 pulling from ../alpha
415 searching for changes
415 searching for changes
416 no changes found
416 no changes found
417 3 local changesets published
417 3 local changesets published
418 test-debug-phase: move rev 3: 1 -> 0
418 test-debug-phase: move rev 3: 1 -> 0
419 test-debug-phase: move rev 5: 1 -> 0
419 test-debug-phase: move rev 5: 1 -> 0
420 test-debug-phase: move rev 6: 1 -> 0
420 test-debug-phase: move rev 6: 1 -> 0
421 $ hgph
421 $ hgph
422 @ 6 public n-B - 145e75495359
422 @ 6 public n-B - 145e75495359
423 |
423 |
424 o 5 public n-A - d6bcb4f74035
424 o 5 public n-A - d6bcb4f74035
425 |
425 |
426 | o 4 public a-D - b555f63b6063
426 | o 4 public a-D - b555f63b6063
427 | |
427 | |
428 o | 3 public b-A - f54f1bb90ff3
428 o | 3 public b-A - f54f1bb90ff3
429 | |
429 | |
430 | o 2 public a-C - 54acac6f23ab
430 | o 2 public a-C - 54acac6f23ab
431 |/
431 |/
432 o 1 public a-B - 548a3d25dbf0
432 o 1 public a-B - 548a3d25dbf0
433 |
433 |
434 o 0 public a-A - 054250a37db4
434 o 0 public a-A - 054250a37db4
435
435
436
436
437 Push
437 Push
438 ````
438 ````
439
439
440 (inserted)
440 (inserted)
441
441
442 Test that phases are pushed even when there is nothing to push
442 Test that phases are pushed even when there is nothing to push
443 (this might be tested later, but it is very convenient to not alter the tests too much)
443 (this might be tested later, but it is very convenient to not alter the tests too much)
444
444
445 Push back to alpha
445 Push back to alpha
446
446
447 $ hg push ../alpha # from nu
447 $ hg push ../alpha # from nu
448 pushing to ../alpha
448 pushing to ../alpha
449 searching for changes
449 searching for changes
450 no changes found
450 no changes found
451 test-debug-phase: move rev 5: 1 -> 0
451 test-debug-phase: move rev 5: 1 -> 0
452 test-debug-phase: move rev 6: 1 -> 0
452 test-debug-phase: move rev 6: 1 -> 0
453 [1]
453 [1]
454 $ cd ..
454 $ cd ..
455 $ cd alpha
455 $ cd alpha
456 $ hgph
456 $ hgph
457 o 6 public n-B - 145e75495359
457 o 6 public n-B - 145e75495359
458 |
458 |
459 o 5 public n-A - d6bcb4f74035
459 o 5 public n-A - d6bcb4f74035
460 |
460 |
461 o 4 public b-A - f54f1bb90ff3
461 o 4 public b-A - f54f1bb90ff3
462 |
462 |
463 | @ 3 public a-D - b555f63b6063
463 | @ 3 public a-D - b555f63b6063
464 | |
464 | |
465 | o 2 public a-C - 54acac6f23ab
465 | o 2 public a-C - 54acac6f23ab
466 |/
466 |/
467 o 1 public a-B - 548a3d25dbf0
467 o 1 public a-B - 548a3d25dbf0
468 |
468 |
469 o 0 public a-A - 054250a37db4
469 o 0 public a-A - 054250a37db4
470
470
471
471
472 (end insertion)
472 (end insertion)
473
473
474
474
475 initial setup
475 initial setup
476
476
477 $ hg log -G # of alpha
477 $ hg log -G # of alpha
478 o changeset: 6:145e75495359
478 o changeset: 6:145e75495359
479 | tag: tip
479 | tag: tip
480 | user: test
480 | user: test
481 | date: Thu Jan 01 00:00:00 1970 +0000
481 | date: Thu Jan 01 00:00:00 1970 +0000
482 | summary: n-B
482 | summary: n-B
483 |
483 |
484 o changeset: 5:d6bcb4f74035
484 o changeset: 5:d6bcb4f74035
485 | user: test
485 | user: test
486 | date: Thu Jan 01 00:00:00 1970 +0000
486 | date: Thu Jan 01 00:00:00 1970 +0000
487 | summary: n-A
487 | summary: n-A
488 |
488 |
489 o changeset: 4:f54f1bb90ff3
489 o changeset: 4:f54f1bb90ff3
490 | parent: 1:548a3d25dbf0
490 | parent: 1:548a3d25dbf0
491 | user: test
491 | user: test
492 | date: Thu Jan 01 00:00:00 1970 +0000
492 | date: Thu Jan 01 00:00:00 1970 +0000
493 | summary: b-A
493 | summary: b-A
494 |
494 |
495 | @ changeset: 3:b555f63b6063
495 | @ changeset: 3:b555f63b6063
496 | | user: test
496 | | user: test
497 | | date: Thu Jan 01 00:00:00 1970 +0000
497 | | date: Thu Jan 01 00:00:00 1970 +0000
498 | | summary: a-D
498 | | summary: a-D
499 | |
499 | |
500 | o changeset: 2:54acac6f23ab
500 | o changeset: 2:54acac6f23ab
501 |/ user: test
501 |/ user: test
502 | date: Thu Jan 01 00:00:00 1970 +0000
502 | date: Thu Jan 01 00:00:00 1970 +0000
503 | summary: a-C
503 | summary: a-C
504 |
504 |
505 o changeset: 1:548a3d25dbf0
505 o changeset: 1:548a3d25dbf0
506 | user: test
506 | user: test
507 | date: Thu Jan 01 00:00:00 1970 +0000
507 | date: Thu Jan 01 00:00:00 1970 +0000
508 | summary: a-B
508 | summary: a-B
509 |
509 |
510 o changeset: 0:054250a37db4
510 o changeset: 0:054250a37db4
511 user: test
511 user: test
512 date: Thu Jan 01 00:00:00 1970 +0000
512 date: Thu Jan 01 00:00:00 1970 +0000
513 summary: a-A
513 summary: a-A
514
514
515 $ mkcommit a-E
515 $ mkcommit a-E
516 test-debug-phase: new rev 7: x -> 1
516 test-debug-phase: new rev 7: x -> 1
517 $ mkcommit a-F
517 $ mkcommit a-F
518 test-debug-phase: new rev 8: x -> 1
518 test-debug-phase: new rev 8: x -> 1
519 $ mkcommit a-G
519 $ mkcommit a-G
520 test-debug-phase: new rev 9: x -> 1
520 test-debug-phase: new rev 9: x -> 1
521 $ hg up d6bcb4f74035 -q
521 $ hg up d6bcb4f74035 -q
522 $ mkcommit a-H
522 $ mkcommit a-H
523 test-debug-phase: new rev 10: x -> 1
523 test-debug-phase: new rev 10: x -> 1
524 created new head
524 created new head
525 $ hgph
525 $ hgph
526 @ 10 draft a-H - 967b449fbc94
526 @ 10 draft a-H - 967b449fbc94
527 |
527 |
528 | o 9 draft a-G - 3e27b6f1eee1
528 | o 9 draft a-G - 3e27b6f1eee1
529 | |
529 | |
530 | o 8 draft a-F - b740e3e5c05d
530 | o 8 draft a-F - b740e3e5c05d
531 | |
531 | |
532 | o 7 draft a-E - e9f537e46dea
532 | o 7 draft a-E - e9f537e46dea
533 | |
533 | |
534 +---o 6 public n-B - 145e75495359
534 +---o 6 public n-B - 145e75495359
535 | |
535 | |
536 o | 5 public n-A - d6bcb4f74035
536 o | 5 public n-A - d6bcb4f74035
537 | |
537 | |
538 o | 4 public b-A - f54f1bb90ff3
538 o | 4 public b-A - f54f1bb90ff3
539 | |
539 | |
540 | o 3 public a-D - b555f63b6063
540 | o 3 public a-D - b555f63b6063
541 | |
541 | |
542 | o 2 public a-C - 54acac6f23ab
542 | o 2 public a-C - 54acac6f23ab
543 |/
543 |/
544 o 1 public a-B - 548a3d25dbf0
544 o 1 public a-B - 548a3d25dbf0
545 |
545 |
546 o 0 public a-A - 054250a37db4
546 o 0 public a-A - 054250a37db4
547
547
548
548
549 Pulling from bundle does not alter phases of changeset not present in the bundle
549 Pulling from bundle does not alter phases of changeset not present in the bundle
550
550
551 #if repobundlerepo
551 #if repobundlerepo
552 $ hg bundle --base 1 -r 6 -r 3 ../partial-bundle.hg
552 $ hg bundle --base 1 -r 6 -r 3 ../partial-bundle.hg
553 5 changesets found
553 5 changesets found
554 $ hg pull ../partial-bundle.hg
554 $ hg pull ../partial-bundle.hg
555 pulling from ../partial-bundle.hg
555 pulling from ../partial-bundle.hg
556 searching for changes
556 searching for changes
557 no changes found
557 no changes found
558 $ hgph
558 $ hgph
559 @ 10 draft a-H - 967b449fbc94
559 @ 10 draft a-H - 967b449fbc94
560 |
560 |
561 | o 9 draft a-G - 3e27b6f1eee1
561 | o 9 draft a-G - 3e27b6f1eee1
562 | |
562 | |
563 | o 8 draft a-F - b740e3e5c05d
563 | o 8 draft a-F - b740e3e5c05d
564 | |
564 | |
565 | o 7 draft a-E - e9f537e46dea
565 | o 7 draft a-E - e9f537e46dea
566 | |
566 | |
567 +---o 6 public n-B - 145e75495359
567 +---o 6 public n-B - 145e75495359
568 | |
568 | |
569 o | 5 public n-A - d6bcb4f74035
569 o | 5 public n-A - d6bcb4f74035
570 | |
570 | |
571 o | 4 public b-A - f54f1bb90ff3
571 o | 4 public b-A - f54f1bb90ff3
572 | |
572 | |
573 | o 3 public a-D - b555f63b6063
573 | o 3 public a-D - b555f63b6063
574 | |
574 | |
575 | o 2 public a-C - 54acac6f23ab
575 | o 2 public a-C - 54acac6f23ab
576 |/
576 |/
577 o 1 public a-B - 548a3d25dbf0
577 o 1 public a-B - 548a3d25dbf0
578 |
578 |
579 o 0 public a-A - 054250a37db4
579 o 0 public a-A - 054250a37db4
580
580
581 #endif
581 #endif
582
582
583 Pushing to Publish=False (unknown changeset)
583 Pushing to Publish=False (unknown changeset)
584
584
585 $ hg push ../mu -r b740e3e5c05d # a-F
585 $ hg push ../mu -r b740e3e5c05d # a-F
586 pushing to ../mu
586 pushing to ../mu
587 searching for changes
587 searching for changes
588 adding changesets
588 adding changesets
589 adding manifests
589 adding manifests
590 adding file changes
590 adding file changes
591 added 2 changesets with 2 changes to 2 files
591 added 2 changesets with 2 changes to 2 files
592 test-debug-phase: new rev 7: x -> 1
592 test-debug-phase: new rev 7: x -> 1
593 test-debug-phase: new rev 8: x -> 1
593 test-debug-phase: new rev 8: x -> 1
594 $ hgph
594 $ hgph
595 @ 10 draft a-H - 967b449fbc94
595 @ 10 draft a-H - 967b449fbc94
596 |
596 |
597 | o 9 draft a-G - 3e27b6f1eee1
597 | o 9 draft a-G - 3e27b6f1eee1
598 | |
598 | |
599 | o 8 draft a-F - b740e3e5c05d
599 | o 8 draft a-F - b740e3e5c05d
600 | |
600 | |
601 | o 7 draft a-E - e9f537e46dea
601 | o 7 draft a-E - e9f537e46dea
602 | |
602 | |
603 +---o 6 public n-B - 145e75495359
603 +---o 6 public n-B - 145e75495359
604 | |
604 | |
605 o | 5 public n-A - d6bcb4f74035
605 o | 5 public n-A - d6bcb4f74035
606 | |
606 | |
607 o | 4 public b-A - f54f1bb90ff3
607 o | 4 public b-A - f54f1bb90ff3
608 | |
608 | |
609 | o 3 public a-D - b555f63b6063
609 | o 3 public a-D - b555f63b6063
610 | |
610 | |
611 | o 2 public a-C - 54acac6f23ab
611 | o 2 public a-C - 54acac6f23ab
612 |/
612 |/
613 o 1 public a-B - 548a3d25dbf0
613 o 1 public a-B - 548a3d25dbf0
614 |
614 |
615 o 0 public a-A - 054250a37db4
615 o 0 public a-A - 054250a37db4
616
616
617
617
618 $ cd ../mu
618 $ cd ../mu
619 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
619 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
620 > # not ancestor of -r
620 > # not ancestor of -r
621 o 8 draft a-F - b740e3e5c05d
621 o 8 draft a-F - b740e3e5c05d
622 |
622 |
623 o 7 draft a-E - e9f537e46dea
623 o 7 draft a-E - e9f537e46dea
624 |
624 |
625 | o 6 draft n-B - 145e75495359
625 | o 6 draft n-B - 145e75495359
626 | |
626 | |
627 | o 5 draft n-A - d6bcb4f74035
627 | o 5 draft n-A - d6bcb4f74035
628 | |
628 | |
629 o | 4 public a-D - b555f63b6063
629 o | 4 public a-D - b555f63b6063
630 | |
630 | |
631 o | 3 public a-C - 54acac6f23ab
631 o | 3 public a-C - 54acac6f23ab
632 | |
632 | |
633 | o 2 draft b-A - f54f1bb90ff3
633 | o 2 draft b-A - f54f1bb90ff3
634 |/
634 |/
635 o 1 public a-B - 548a3d25dbf0
635 o 1 public a-B - 548a3d25dbf0
636 |
636 |
637 o 0 public a-A - 054250a37db4
637 o 0 public a-A - 054250a37db4
638
638
639
639
640 Pushing to Publish=True (unknown changeset)
640 Pushing to Publish=True (unknown changeset)
641
641
642 $ hg push ../beta -r b740e3e5c05d
642 $ hg push ../beta -r b740e3e5c05d
643 pushing to ../beta
643 pushing to ../beta
644 searching for changes
644 searching for changes
645 adding changesets
645 adding changesets
646 adding manifests
646 adding manifests
647 adding file changes
647 adding file changes
648 added 2 changesets with 2 changes to 2 files
648 added 2 changesets with 2 changes to 2 files
649 test-debug-phase: new rev 5: x -> 0
649 test-debug-phase: new rev 5: x -> 0
650 test-debug-phase: new rev 6: x -> 0
650 test-debug-phase: new rev 6: x -> 0
651 test-debug-phase: move rev 7: 1 -> 0
651 test-debug-phase: move rev 7: 1 -> 0
652 test-debug-phase: move rev 8: 1 -> 0
652 test-debug-phase: move rev 8: 1 -> 0
653 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
653 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
654 > # not ancestor of -r
654 > # not ancestor of -r
655 o 8 public a-F - b740e3e5c05d
655 o 8 public a-F - b740e3e5c05d
656 |
656 |
657 o 7 public a-E - e9f537e46dea
657 o 7 public a-E - e9f537e46dea
658 |
658 |
659 | o 6 draft n-B - 145e75495359
659 | o 6 draft n-B - 145e75495359
660 | |
660 | |
661 | o 5 draft n-A - d6bcb4f74035
661 | o 5 draft n-A - d6bcb4f74035
662 | |
662 | |
663 o | 4 public a-D - b555f63b6063
663 o | 4 public a-D - b555f63b6063
664 | |
664 | |
665 o | 3 public a-C - 54acac6f23ab
665 o | 3 public a-C - 54acac6f23ab
666 | |
666 | |
667 | o 2 draft b-A - f54f1bb90ff3
667 | o 2 draft b-A - f54f1bb90ff3
668 |/
668 |/
669 o 1 public a-B - 548a3d25dbf0
669 o 1 public a-B - 548a3d25dbf0
670 |
670 |
671 o 0 public a-A - 054250a37db4
671 o 0 public a-A - 054250a37db4
672
672
673
673
674 Pushing to Publish=True (common changeset)
674 Pushing to Publish=True (common changeset)
675
675
676 $ cd ../beta
676 $ cd ../beta
677 $ hg push ../alpha
677 $ hg push ../alpha
678 pushing to ../alpha
678 pushing to ../alpha
679 searching for changes
679 searching for changes
680 no changes found
680 no changes found
681 test-debug-phase: move rev 7: 1 -> 0
681 test-debug-phase: move rev 7: 1 -> 0
682 test-debug-phase: move rev 8: 1 -> 0
682 test-debug-phase: move rev 8: 1 -> 0
683 [1]
683 [1]
684 $ hgph
684 $ hgph
685 o 6 public a-F - b740e3e5c05d
685 o 6 public a-F - b740e3e5c05d
686 |
686 |
687 o 5 public a-E - e9f537e46dea
687 o 5 public a-E - e9f537e46dea
688 |
688 |
689 o 4 public a-D - b555f63b6063
689 o 4 public a-D - b555f63b6063
690 |
690 |
691 o 3 public a-C - 54acac6f23ab
691 o 3 public a-C - 54acac6f23ab
692 |
692 |
693 | @ 2 public b-A - f54f1bb90ff3
693 | @ 2 public b-A - f54f1bb90ff3
694 |/
694 |/
695 o 1 public a-B - 548a3d25dbf0
695 o 1 public a-B - 548a3d25dbf0
696 |
696 |
697 o 0 public a-A - 054250a37db4
697 o 0 public a-A - 054250a37db4
698
698
699 $ cd ../alpha
699 $ cd ../alpha
700 $ hgph
700 $ hgph
701 @ 10 draft a-H - 967b449fbc94
701 @ 10 draft a-H - 967b449fbc94
702 |
702 |
703 | o 9 draft a-G - 3e27b6f1eee1
703 | o 9 draft a-G - 3e27b6f1eee1
704 | |
704 | |
705 | o 8 public a-F - b740e3e5c05d
705 | o 8 public a-F - b740e3e5c05d
706 | |
706 | |
707 | o 7 public a-E - e9f537e46dea
707 | o 7 public a-E - e9f537e46dea
708 | |
708 | |
709 +---o 6 public n-B - 145e75495359
709 +---o 6 public n-B - 145e75495359
710 | |
710 | |
711 o | 5 public n-A - d6bcb4f74035
711 o | 5 public n-A - d6bcb4f74035
712 | |
712 | |
713 o | 4 public b-A - f54f1bb90ff3
713 o | 4 public b-A - f54f1bb90ff3
714 | |
714 | |
715 | o 3 public a-D - b555f63b6063
715 | o 3 public a-D - b555f63b6063
716 | |
716 | |
717 | o 2 public a-C - 54acac6f23ab
717 | o 2 public a-C - 54acac6f23ab
718 |/
718 |/
719 o 1 public a-B - 548a3d25dbf0
719 o 1 public a-B - 548a3d25dbf0
720 |
720 |
721 o 0 public a-A - 054250a37db4
721 o 0 public a-A - 054250a37db4
722
722
723
723
724 Pushing to Publish=False (common changeset that changes phase + unknown one)
724 Pushing to Publish=False (common changeset that changes phase + unknown one)
725
725
726 $ hg push ../mu -r 967b449fbc94 -f
726 $ hg push ../mu -r 967b449fbc94 -f
727 pushing to ../mu
727 pushing to ../mu
728 searching for changes
728 searching for changes
729 adding changesets
729 adding changesets
730 adding manifests
730 adding manifests
731 adding file changes
731 adding file changes
732 added 1 changesets with 1 changes to 1 files (+1 heads)
732 added 1 changesets with 1 changes to 1 files (+1 heads)
733 test-debug-phase: move rev 2: 1 -> 0
733 test-debug-phase: move rev 2: 1 -> 0
734 test-debug-phase: move rev 5: 1 -> 0
734 test-debug-phase: move rev 5: 1 -> 0
735 test-debug-phase: new rev 9: x -> 1
735 test-debug-phase: new rev 9: x -> 1
736 $ hgph
736 $ hgph
737 @ 10 draft a-H - 967b449fbc94
737 @ 10 draft a-H - 967b449fbc94
738 |
738 |
739 | o 9 draft a-G - 3e27b6f1eee1
739 | o 9 draft a-G - 3e27b6f1eee1
740 | |
740 | |
741 | o 8 public a-F - b740e3e5c05d
741 | o 8 public a-F - b740e3e5c05d
742 | |
742 | |
743 | o 7 public a-E - e9f537e46dea
743 | o 7 public a-E - e9f537e46dea
744 | |
744 | |
745 +---o 6 public n-B - 145e75495359
745 +---o 6 public n-B - 145e75495359
746 | |
746 | |
747 o | 5 public n-A - d6bcb4f74035
747 o | 5 public n-A - d6bcb4f74035
748 | |
748 | |
749 o | 4 public b-A - f54f1bb90ff3
749 o | 4 public b-A - f54f1bb90ff3
750 | |
750 | |
751 | o 3 public a-D - b555f63b6063
751 | o 3 public a-D - b555f63b6063
752 | |
752 | |
753 | o 2 public a-C - 54acac6f23ab
753 | o 2 public a-C - 54acac6f23ab
754 |/
754 |/
755 o 1 public a-B - 548a3d25dbf0
755 o 1 public a-B - 548a3d25dbf0
756 |
756 |
757 o 0 public a-A - 054250a37db4
757 o 0 public a-A - 054250a37db4
758
758
759 $ cd ../mu
759 $ cd ../mu
760 $ hgph # d6bcb4f74035 should have changed phase
760 $ hgph # d6bcb4f74035 should have changed phase
761 > # 145e75495359 is still draft. not ancestor of -r
761 > # 145e75495359 is still draft. not ancestor of -r
762 o 9 draft a-H - 967b449fbc94
762 o 9 draft a-H - 967b449fbc94
763 |
763 |
764 | o 8 public a-F - b740e3e5c05d
764 | o 8 public a-F - b740e3e5c05d
765 | |
765 | |
766 | o 7 public a-E - e9f537e46dea
766 | o 7 public a-E - e9f537e46dea
767 | |
767 | |
768 +---o 6 draft n-B - 145e75495359
768 +---o 6 draft n-B - 145e75495359
769 | |
769 | |
770 o | 5 public n-A - d6bcb4f74035
770 o | 5 public n-A - d6bcb4f74035
771 | |
771 | |
772 | o 4 public a-D - b555f63b6063
772 | o 4 public a-D - b555f63b6063
773 | |
773 | |
774 | o 3 public a-C - 54acac6f23ab
774 | o 3 public a-C - 54acac6f23ab
775 | |
775 | |
776 o | 2 public b-A - f54f1bb90ff3
776 o | 2 public b-A - f54f1bb90ff3
777 |/
777 |/
778 o 1 public a-B - 548a3d25dbf0
778 o 1 public a-B - 548a3d25dbf0
779 |
779 |
780 o 0 public a-A - 054250a37db4
780 o 0 public a-A - 054250a37db4
781
781
782
782
783
783
784 Pushing to Publish=True (common changeset from publish=False)
784 Pushing to Publish=True (common changeset from publish=False)
785
785
786 (in mu)
786 (in mu)
787 $ hg push ../alpha
787 $ hg push ../alpha
788 pushing to ../alpha
788 pushing to ../alpha
789 searching for changes
789 searching for changes
790 no changes found
790 no changes found
791 test-debug-phase: move rev 10: 1 -> 0
791 test-debug-phase: move rev 10: 1 -> 0
792 test-debug-phase: move rev 6: 1 -> 0
792 test-debug-phase: move rev 6: 1 -> 0
793 test-debug-phase: move rev 9: 1 -> 0
793 test-debug-phase: move rev 9: 1 -> 0
794 [1]
794 [1]
795 $ hgph
795 $ hgph
796 o 9 public a-H - 967b449fbc94
796 o 9 public a-H - 967b449fbc94
797 |
797 |
798 | o 8 public a-F - b740e3e5c05d
798 | o 8 public a-F - b740e3e5c05d
799 | |
799 | |
800 | o 7 public a-E - e9f537e46dea
800 | o 7 public a-E - e9f537e46dea
801 | |
801 | |
802 +---o 6 public n-B - 145e75495359
802 +---o 6 public n-B - 145e75495359
803 | |
803 | |
804 o | 5 public n-A - d6bcb4f74035
804 o | 5 public n-A - d6bcb4f74035
805 | |
805 | |
806 | o 4 public a-D - b555f63b6063
806 | o 4 public a-D - b555f63b6063
807 | |
807 | |
808 | o 3 public a-C - 54acac6f23ab
808 | o 3 public a-C - 54acac6f23ab
809 | |
809 | |
810 o | 2 public b-A - f54f1bb90ff3
810 o | 2 public b-A - f54f1bb90ff3
811 |/
811 |/
812 o 1 public a-B - 548a3d25dbf0
812 o 1 public a-B - 548a3d25dbf0
813 |
813 |
814 o 0 public a-A - 054250a37db4
814 o 0 public a-A - 054250a37db4
815
815
816 $ hgph -R ../alpha # a-H should have been synced to 0
816 $ hgph -R ../alpha # a-H should have been synced to 0
817 @ 10 public a-H - 967b449fbc94
817 @ 10 public a-H - 967b449fbc94
818 |
818 |
819 | o 9 draft a-G - 3e27b6f1eee1
819 | o 9 draft a-G - 3e27b6f1eee1
820 | |
820 | |
821 | o 8 public a-F - b740e3e5c05d
821 | o 8 public a-F - b740e3e5c05d
822 | |
822 | |
823 | o 7 public a-E - e9f537e46dea
823 | o 7 public a-E - e9f537e46dea
824 | |
824 | |
825 +---o 6 public n-B - 145e75495359
825 +---o 6 public n-B - 145e75495359
826 | |
826 | |
827 o | 5 public n-A - d6bcb4f74035
827 o | 5 public n-A - d6bcb4f74035
828 | |
828 | |
829 o | 4 public b-A - f54f1bb90ff3
829 o | 4 public b-A - f54f1bb90ff3
830 | |
830 | |
831 | o 3 public a-D - b555f63b6063
831 | o 3 public a-D - b555f63b6063
832 | |
832 | |
833 | o 2 public a-C - 54acac6f23ab
833 | o 2 public a-C - 54acac6f23ab
834 |/
834 |/
835 o 1 public a-B - 548a3d25dbf0
835 o 1 public a-B - 548a3d25dbf0
836 |
836 |
837 o 0 public a-A - 054250a37db4
837 o 0 public a-A - 054250a37db4
838
838
839
839
840
840
841 Bare push with next changeset and common changeset needing sync (issue3575)
841 Bare push with next changeset and common changeset needing sync (issue3575)
842
842
843 (reset some stat on remote repo to avoid confusing other tests)
843 (reset some stat on remote repo to avoid confusing other tests)
844
844
845 $ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94
845 $ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94
846 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
846 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
847 $ hg phase --force --draft b740e3e5c05d 967b449fbc94
847 $ hg phase --force --draft b740e3e5c05d 967b449fbc94
848 test-debug-phase: move rev 8: 0 -> 1
848 test-debug-phase: move rev 8: 0 -> 1
849 test-debug-phase: move rev 9: 0 -> 1
849 test-debug-phase: move rev 9: 0 -> 1
850 $ hg push -fv ../alpha
850 $ hg push -fv ../alpha
851 pushing to ../alpha
851 pushing to ../alpha
852 searching for changes
852 searching for changes
853 1 changesets found
853 1 changesets found
854 uncompressed size of bundle content:
854 uncompressed size of bundle content:
855 178 (changelog)
855 178 (changelog)
856 165 (manifests)
856 165 (manifests)
857 131 a-H
857 131 a-H
858 adding changesets
858 adding changesets
859 adding manifests
859 adding manifests
860 adding file changes
860 adding file changes
861 added 1 changesets with 1 changes to 1 files (+1 heads)
861 added 1 changesets with 1 changes to 1 files (+1 heads)
862 test-debug-phase: new rev 10: x -> 0
862 test-debug-phase: new rev 10: x -> 0
863 test-debug-phase: move rev 8: 1 -> 0
863 test-debug-phase: move rev 8: 1 -> 0
864 test-debug-phase: move rev 9: 1 -> 0
864 test-debug-phase: move rev 9: 1 -> 0
865 $ hgph
865 $ hgph
866 o 9 public a-H - 967b449fbc94
866 o 9 public a-H - 967b449fbc94
867 |
867 |
868 | o 8 public a-F - b740e3e5c05d
868 | o 8 public a-F - b740e3e5c05d
869 | |
869 | |
870 | o 7 public a-E - e9f537e46dea
870 | o 7 public a-E - e9f537e46dea
871 | |
871 | |
872 +---o 6 public n-B - 145e75495359
872 +---o 6 public n-B - 145e75495359
873 | |
873 | |
874 o | 5 public n-A - d6bcb4f74035
874 o | 5 public n-A - d6bcb4f74035
875 | |
875 | |
876 | o 4 public a-D - b555f63b6063
876 | o 4 public a-D - b555f63b6063
877 | |
877 | |
878 | o 3 public a-C - 54acac6f23ab
878 | o 3 public a-C - 54acac6f23ab
879 | |
879 | |
880 o | 2 public b-A - f54f1bb90ff3
880 o | 2 public b-A - f54f1bb90ff3
881 |/
881 |/
882 o 1 public a-B - 548a3d25dbf0
882 o 1 public a-B - 548a3d25dbf0
883 |
883 |
884 o 0 public a-A - 054250a37db4
884 o 0 public a-A - 054250a37db4
885
885
886
886
887 $ hg -R ../alpha update 967b449fbc94 #for latter test consistency
887 $ hg -R ../alpha update 967b449fbc94 #for latter test consistency
888 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
888 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
889 $ hgph -R ../alpha
889 $ hgph -R ../alpha
890 @ 10 public a-H - 967b449fbc94
890 @ 10 public a-H - 967b449fbc94
891 |
891 |
892 | o 9 draft a-G - 3e27b6f1eee1
892 | o 9 draft a-G - 3e27b6f1eee1
893 | |
893 | |
894 | o 8 public a-F - b740e3e5c05d
894 | o 8 public a-F - b740e3e5c05d
895 | |
895 | |
896 | o 7 public a-E - e9f537e46dea
896 | o 7 public a-E - e9f537e46dea
897 | |
897 | |
898 +---o 6 public n-B - 145e75495359
898 +---o 6 public n-B - 145e75495359
899 | |
899 | |
900 o | 5 public n-A - d6bcb4f74035
900 o | 5 public n-A - d6bcb4f74035
901 | |
901 | |
902 o | 4 public b-A - f54f1bb90ff3
902 o | 4 public b-A - f54f1bb90ff3
903 | |
903 | |
904 | o 3 public a-D - b555f63b6063
904 | o 3 public a-D - b555f63b6063
905 | |
905 | |
906 | o 2 public a-C - 54acac6f23ab
906 | o 2 public a-C - 54acac6f23ab
907 |/
907 |/
908 o 1 public a-B - 548a3d25dbf0
908 o 1 public a-B - 548a3d25dbf0
909 |
909 |
910 o 0 public a-A - 054250a37db4
910 o 0 public a-A - 054250a37db4
911
911
912
912
913 Discovery locally secret changeset on a remote repository:
913 Discovery locally secret changeset on a remote repository:
914
914
915 - should make it non-secret
915 - should make it non-secret
916
916
917 $ cd ../alpha
917 $ cd ../alpha
918 $ mkcommit A-secret --config phases.new-commit=2
918 $ mkcommit A-secret --config phases.new-commit=2
919 test-debug-phase: new rev 11: x -> 2
919 test-debug-phase: new rev 11: x -> 2
920 $ hgph
920 $ hgph
921 @ 11 secret A-secret - 435b5d83910c
921 @ 11 secret A-secret - 435b5d83910c
922 |
922 |
923 o 10 public a-H - 967b449fbc94
923 o 10 public a-H - 967b449fbc94
924 |
924 |
925 | o 9 draft a-G - 3e27b6f1eee1
925 | o 9 draft a-G - 3e27b6f1eee1
926 | |
926 | |
927 | o 8 public a-F - b740e3e5c05d
927 | o 8 public a-F - b740e3e5c05d
928 | |
928 | |
929 | o 7 public a-E - e9f537e46dea
929 | o 7 public a-E - e9f537e46dea
930 | |
930 | |
931 +---o 6 public n-B - 145e75495359
931 +---o 6 public n-B - 145e75495359
932 | |
932 | |
933 o | 5 public n-A - d6bcb4f74035
933 o | 5 public n-A - d6bcb4f74035
934 | |
934 | |
935 o | 4 public b-A - f54f1bb90ff3
935 o | 4 public b-A - f54f1bb90ff3
936 | |
936 | |
937 | o 3 public a-D - b555f63b6063
937 | o 3 public a-D - b555f63b6063
938 | |
938 | |
939 | o 2 public a-C - 54acac6f23ab
939 | o 2 public a-C - 54acac6f23ab
940 |/
940 |/
941 o 1 public a-B - 548a3d25dbf0
941 o 1 public a-B - 548a3d25dbf0
942 |
942 |
943 o 0 public a-A - 054250a37db4
943 o 0 public a-A - 054250a37db4
944
944
945 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
945 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
946 1 changesets found
946 1 changesets found
947 $ hg -R ../mu unbundle ../secret-bundle.hg
947 $ hg -R ../mu unbundle ../secret-bundle.hg
948 adding changesets
948 adding changesets
949 adding manifests
949 adding manifests
950 adding file changes
950 adding file changes
951 added 1 changesets with 1 changes to 1 files
951 added 1 changesets with 1 changes to 1 files
952 new changesets 435b5d83910c (1 drafts)
952 new changesets 435b5d83910c (1 drafts)
953 test-debug-phase: new rev 10: x -> 1
953 test-debug-phase: new rev 10: x -> 1
954 (run 'hg update' to get a working copy)
954 (run 'hg update' to get a working copy)
955 $ hgph -R ../mu
955 $ hgph -R ../mu
956 o 10 draft A-secret - 435b5d83910c
956 o 10 draft A-secret - 435b5d83910c
957 |
957 |
958 o 9 public a-H - 967b449fbc94
958 o 9 public a-H - 967b449fbc94
959 |
959 |
960 | o 8 public a-F - b740e3e5c05d
960 | o 8 public a-F - b740e3e5c05d
961 | |
961 | |
962 | o 7 public a-E - e9f537e46dea
962 | o 7 public a-E - e9f537e46dea
963 | |
963 | |
964 +---o 6 public n-B - 145e75495359
964 +---o 6 public n-B - 145e75495359
965 | |
965 | |
966 o | 5 public n-A - d6bcb4f74035
966 o | 5 public n-A - d6bcb4f74035
967 | |
967 | |
968 | o 4 public a-D - b555f63b6063
968 | o 4 public a-D - b555f63b6063
969 | |
969 | |
970 | o 3 public a-C - 54acac6f23ab
970 | o 3 public a-C - 54acac6f23ab
971 | |
971 | |
972 o | 2 public b-A - f54f1bb90ff3
972 o | 2 public b-A - f54f1bb90ff3
973 |/
973 |/
974 o 1 public a-B - 548a3d25dbf0
974 o 1 public a-B - 548a3d25dbf0
975 |
975 |
976 o 0 public a-A - 054250a37db4
976 o 0 public a-A - 054250a37db4
977
977
978 $ hg pull ../mu
978 $ hg pull ../mu
979 pulling from ../mu
979 pulling from ../mu
980 searching for changes
980 searching for changes
981 no changes found
981 no changes found
982 test-debug-phase: move rev 11: 2 -> 1
982 test-debug-phase: move rev 11: 2 -> 1
983 $ hgph
983 $ hgph
984 @ 11 draft A-secret - 435b5d83910c
984 @ 11 draft A-secret - 435b5d83910c
985 |
985 |
986 o 10 public a-H - 967b449fbc94
986 o 10 public a-H - 967b449fbc94
987 |
987 |
988 | o 9 draft a-G - 3e27b6f1eee1
988 | o 9 draft a-G - 3e27b6f1eee1
989 | |
989 | |
990 | o 8 public a-F - b740e3e5c05d
990 | o 8 public a-F - b740e3e5c05d
991 | |
991 | |
992 | o 7 public a-E - e9f537e46dea
992 | o 7 public a-E - e9f537e46dea
993 | |
993 | |
994 +---o 6 public n-B - 145e75495359
994 +---o 6 public n-B - 145e75495359
995 | |
995 | |
996 o | 5 public n-A - d6bcb4f74035
996 o | 5 public n-A - d6bcb4f74035
997 | |
997 | |
998 o | 4 public b-A - f54f1bb90ff3
998 o | 4 public b-A - f54f1bb90ff3
999 | |
999 | |
1000 | o 3 public a-D - b555f63b6063
1000 | o 3 public a-D - b555f63b6063
1001 | |
1001 | |
1002 | o 2 public a-C - 54acac6f23ab
1002 | o 2 public a-C - 54acac6f23ab
1003 |/
1003 |/
1004 o 1 public a-B - 548a3d25dbf0
1004 o 1 public a-B - 548a3d25dbf0
1005 |
1005 |
1006 o 0 public a-A - 054250a37db4
1006 o 0 public a-A - 054250a37db4
1007
1007
1008
1008
1009 pushing a locally public and draft changesets remotely secret should make them
1009 pushing a locally public and draft changesets remotely secret should make them
1010 appear on the remote side.
1010 appear on the remote side.
1011
1011
1012 $ hg -R ../mu phase --secret --force 967b449fbc94
1012 $ hg -R ../mu phase --secret --force 967b449fbc94
1013 test-debug-phase: move rev 9: 0 -> 2
1013 test-debug-phase: move rev 9: 0 -> 2
1014 test-debug-phase: move rev 10: 1 -> 2
1014 test-debug-phase: move rev 10: 1 -> 2
1015 $ hg push -r 435b5d83910c ../mu
1015 $ hg push -r 435b5d83910c ../mu
1016 pushing to ../mu
1016 pushing to ../mu
1017 searching for changes
1017 searching for changes
1018 abort: push creates new remote head 435b5d83910c
1018 abort: push creates new remote head 435b5d83910c
1019 (merge or see 'hg help push' for details about pushing new heads)
1019 (merge or see 'hg help push' for details about pushing new heads)
1020 [20]
1020 [20]
1021 $ hg push -fr 435b5d83910c ../mu # because the push will create new visible head
1021 $ hg push -fr 435b5d83910c ../mu # because the push will create new visible head
1022 pushing to ../mu
1022 pushing to ../mu
1023 searching for changes
1023 searching for changes
1024 adding changesets
1024 adding changesets
1025 adding manifests
1025 adding manifests
1026 adding file changes
1026 adding file changes
1027 added 0 changesets with 0 changes to 2 files
1027 added 0 changesets with 0 changes to 2 files
1028 test-debug-phase: move rev 9: 2 -> 0
1028 test-debug-phase: move rev 9: 2 -> 0
1029 test-debug-phase: move rev 10: 2 -> 1
1029 test-debug-phase: move rev 10: 2 -> 1
1030 $ hgph -R ../mu
1030 $ hgph -R ../mu
1031 o 10 draft A-secret - 435b5d83910c
1031 o 10 draft A-secret - 435b5d83910c
1032 |
1032 |
1033 o 9 public a-H - 967b449fbc94
1033 o 9 public a-H - 967b449fbc94
1034 |
1034 |
1035 | o 8 public a-F - b740e3e5c05d
1035 | o 8 public a-F - b740e3e5c05d
1036 | |
1036 | |
1037 | o 7 public a-E - e9f537e46dea
1037 | o 7 public a-E - e9f537e46dea
1038 | |
1038 | |
1039 +---o 6 public n-B - 145e75495359
1039 +---o 6 public n-B - 145e75495359
1040 | |
1040 | |
1041 o | 5 public n-A - d6bcb4f74035
1041 o | 5 public n-A - d6bcb4f74035
1042 | |
1042 | |
1043 | o 4 public a-D - b555f63b6063
1043 | o 4 public a-D - b555f63b6063
1044 | |
1044 | |
1045 | o 3 public a-C - 54acac6f23ab
1045 | o 3 public a-C - 54acac6f23ab
1046 | |
1046 | |
1047 o | 2 public b-A - f54f1bb90ff3
1047 o | 2 public b-A - f54f1bb90ff3
1048 |/
1048 |/
1049 o 1 public a-B - 548a3d25dbf0
1049 o 1 public a-B - 548a3d25dbf0
1050 |
1050 |
1051 o 0 public a-A - 054250a37db4
1051 o 0 public a-A - 054250a37db4
1052
1052
1053
1053
1054 pull new changeset with common draft locally
1054 pull new changeset with common draft locally
1055
1055
1056 $ hg up -q 967b449fbc94 # create a new root for draft
1056 $ hg up -q 967b449fbc94 # create a new root for draft
1057 $ mkcommit 'alpha-more'
1057 $ mkcommit 'alpha-more'
1058 test-debug-phase: new rev 12: x -> 1
1058 test-debug-phase: new rev 12: x -> 1
1059 created new head
1059 created new head
1060 $ hg push -fr . ../mu
1060 $ hg push -fr . ../mu
1061 pushing to ../mu
1061 pushing to ../mu
1062 searching for changes
1062 searching for changes
1063 adding changesets
1063 adding changesets
1064 adding manifests
1064 adding manifests
1065 adding file changes
1065 adding file changes
1066 added 1 changesets with 1 changes to 1 files (+1 heads)
1066 added 1 changesets with 1 changes to 1 files (+1 heads)
1067 test-debug-phase: new rev 11: x -> 1
1067 test-debug-phase: new rev 11: x -> 1
1068 $ cd ../mu
1068 $ cd ../mu
1069 $ hg phase --secret --force 1c5cfd894796
1069 $ hg phase --secret --force 1c5cfd894796
1070 test-debug-phase: move rev 11: 1 -> 2
1070 test-debug-phase: move rev 11: 1 -> 2
1071 $ hg up -q 435b5d83910c
1071 $ hg up -q 435b5d83910c
1072 $ mkcommit 'mu-more'
1072 $ mkcommit 'mu-more'
1073 test-debug-phase: new rev 12: x -> 1
1073 test-debug-phase: new rev 12: x -> 1
1074 $ cd ../alpha
1074 $ cd ../alpha
1075 $ hg pull ../mu
1075 $ hg pull ../mu
1076 pulling from ../mu
1076 pulling from ../mu
1077 searching for changes
1077 searching for changes
1078 adding changesets
1078 adding changesets
1079 adding manifests
1079 adding manifests
1080 adding file changes
1080 adding file changes
1081 added 1 changesets with 1 changes to 1 files
1081 added 1 changesets with 1 changes to 1 files
1082 new changesets 5237fb433fc8 (1 drafts)
1082 new changesets 5237fb433fc8 (1 drafts)
1083 test-debug-phase: new rev 13: x -> 1
1083 test-debug-phase: new rev 13: x -> 1
1084 (run 'hg update' to get a working copy)
1084 (run 'hg update' to get a working copy)
1085 $ hgph
1085 $ hgph
1086 o 13 draft mu-more - 5237fb433fc8
1086 o 13 draft mu-more - 5237fb433fc8
1087 |
1087 |
1088 | @ 12 draft alpha-more - 1c5cfd894796
1088 | @ 12 draft alpha-more - 1c5cfd894796
1089 | |
1089 | |
1090 o | 11 draft A-secret - 435b5d83910c
1090 o | 11 draft A-secret - 435b5d83910c
1091 |/
1091 |/
1092 o 10 public a-H - 967b449fbc94
1092 o 10 public a-H - 967b449fbc94
1093 |
1093 |
1094 | o 9 draft a-G - 3e27b6f1eee1
1094 | o 9 draft a-G - 3e27b6f1eee1
1095 | |
1095 | |
1096 | o 8 public a-F - b740e3e5c05d
1096 | o 8 public a-F - b740e3e5c05d
1097 | |
1097 | |
1098 | o 7 public a-E - e9f537e46dea
1098 | o 7 public a-E - e9f537e46dea
1099 | |
1099 | |
1100 +---o 6 public n-B - 145e75495359
1100 +---o 6 public n-B - 145e75495359
1101 | |
1101 | |
1102 o | 5 public n-A - d6bcb4f74035
1102 o | 5 public n-A - d6bcb4f74035
1103 | |
1103 | |
1104 o | 4 public b-A - f54f1bb90ff3
1104 o | 4 public b-A - f54f1bb90ff3
1105 | |
1105 | |
1106 | o 3 public a-D - b555f63b6063
1106 | o 3 public a-D - b555f63b6063
1107 | |
1107 | |
1108 | o 2 public a-C - 54acac6f23ab
1108 | o 2 public a-C - 54acac6f23ab
1109 |/
1109 |/
1110 o 1 public a-B - 548a3d25dbf0
1110 o 1 public a-B - 548a3d25dbf0
1111 |
1111 |
1112 o 0 public a-A - 054250a37db4
1112 o 0 public a-A - 054250a37db4
1113
1113
1114
1114
1115 Test that test are properly ignored on remote event when existing locally
1115 Test that test are properly ignored on remote event when existing locally
1116
1116
1117 $ cd ..
1117 $ cd ..
1118 $ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
1118 $ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
1119 test-debug-phase: new rev 0: x -> 0
1119 test-debug-phase: new rev 0: x -> 0
1120 test-debug-phase: new rev 1: x -> 0
1120 test-debug-phase: new rev 1: x -> 0
1121 test-debug-phase: new rev 2: x -> 0
1121 test-debug-phase: new rev 2: x -> 0
1122 test-debug-phase: new rev 3: x -> 0
1122 test-debug-phase: new rev 3: x -> 0
1123 test-debug-phase: new rev 4: x -> 0
1123 test-debug-phase: new rev 4: x -> 0
1124
1124
1125 # pathological case are
1125 # pathological case are
1126 #
1126 #
1127 # * secret remotely
1127 # * secret remotely
1128 # * known locally
1128 # * known locally
1129 # * repo have uncommon changeset
1129 # * repo have uncommon changeset
1130
1130
1131 $ hg -R beta phase --secret --force f54f1bb90ff3
1131 $ hg -R beta phase --secret --force f54f1bb90ff3
1132 test-debug-phase: move rev 2: 0 -> 2
1132 test-debug-phase: move rev 2: 0 -> 2
1133 $ hg -R gamma phase --draft --force f54f1bb90ff3
1133 $ hg -R gamma phase --draft --force f54f1bb90ff3
1134 test-debug-phase: move rev 2: 0 -> 1
1134 test-debug-phase: move rev 2: 0 -> 1
1135
1135
1136 $ cd gamma
1136 $ cd gamma
1137 $ hg pull ../beta
1137 $ hg pull ../beta
1138 pulling from ../beta
1138 pulling from ../beta
1139 searching for changes
1139 searching for changes
1140 adding changesets
1140 adding changesets
1141 adding manifests
1141 adding manifests
1142 adding file changes
1142 adding file changes
1143 added 2 changesets with 2 changes to 2 files
1143 added 2 changesets with 2 changes to 2 files
1144 new changesets e9f537e46dea:b740e3e5c05d
1144 new changesets e9f537e46dea:b740e3e5c05d
1145 test-debug-phase: new rev 5: x -> 0
1145 test-debug-phase: new rev 5: x -> 0
1146 test-debug-phase: new rev 6: x -> 0
1146 test-debug-phase: new rev 6: x -> 0
1147 (run 'hg update' to get a working copy)
1147 (run 'hg update' to get a working copy)
1148 $ hg phase f54f1bb90ff3
1148 $ hg phase f54f1bb90ff3
1149 2: draft
1149 2: draft
1150
1150
1151 same over the wire
1151 same over the wire
1152
1152
1153 $ cd ../beta
1153 $ cd ../beta
1154 $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log
1154 $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log
1155 $ cat ../beta.pid >> $DAEMON_PIDS
1155 $ cat ../beta.pid >> $DAEMON_PIDS
1156 $ cd ../gamma
1156 $ cd ../gamma
1157
1157
1158 $ hg pull http://localhost:$HGPORT/ # bundle2+
1158 $ hg pull http://localhost:$HGPORT/ # bundle2+
1159 pulling from http://localhost:$HGPORT/
1159 pulling from http://localhost:$HGPORT/
1160 searching for changes
1160 searching for changes
1161 no changes found
1161 no changes found
1162 $ hg phase f54f1bb90ff3
1162 $ hg phase f54f1bb90ff3
1163 2: draft
1163 2: draft
1164
1164
1165 enforce bundle1
1165 enforce bundle1
1166
1166
1167 $ hg pull http://localhost:$HGPORT/ --config devel.legacy.exchange=bundle1
1167 $ hg pull http://localhost:$HGPORT/ --config devel.legacy.exchange=bundle1
1168 pulling from http://localhost:$HGPORT/
1168 pulling from http://localhost:$HGPORT/
1169 searching for changes
1169 searching for changes
1170 no changes found
1170 no changes found
1171 $ hg phase f54f1bb90ff3
1171 $ hg phase f54f1bb90ff3
1172 2: draft
1172 2: draft
1173
1173
1174 check that secret local on both side are not synced to public
1174 check that secret local on both side are not synced to public
1175
1175
1176 $ hg push -r b555f63b6063 http://localhost:$HGPORT/
1176 $ hg push -r b555f63b6063 http://localhost:$HGPORT/
1177 pushing to http://localhost:$HGPORT/
1177 pushing to http://localhost:$HGPORT/
1178 searching for changes
1178 searching for changes
1179 no changes found
1179 no changes found
1180 [1]
1180 [1]
1181 $ hg phase f54f1bb90ff3
1181 $ hg phase f54f1bb90ff3
1182 2: draft
1182 2: draft
1183
1183
1184 $ killdaemons.py
1184 $ killdaemons.py
1185
1185
1186 put the changeset in the draft state again
1186 put the changeset in the draft state again
1187 (first test after this one expect to be able to copy)
1187 (first test after this one expect to be able to copy)
1188
1188
1189 $ cd ..
1189 $ cd ..
1190
1190
1191
1191
1192 Test Clone behavior
1192 Test Clone behavior
1193
1193
1194 A. Clone without secret changeset
1194 A. Clone without secret changeset
1195
1195
1196 1. cloning non-publishing repository
1196 1. cloning non-publishing repository
1197 (Phase should be preserved)
1197 (Phase should be preserved)
1198
1198
1199 # make sure there is no secret so we can use a copy clone
1199 # make sure there is no secret so we can use a copy clone
1200
1200
1201 $ hg -R mu phase --draft 'secret()'
1201 $ hg -R mu phase --draft 'secret()'
1202 test-debug-phase: move rev 11: 2 -> 1
1202 test-debug-phase: move rev 11: 2 -> 1
1203
1203
1204 $ hg clone -U mu Tau
1204 $ hg clone -U mu Tau
1205 $ hgph -R Tau
1205 $ hgph -R Tau
1206 o 12 draft mu-more - 5237fb433fc8
1206 o 12 draft mu-more - 5237fb433fc8
1207 |
1207 |
1208 | o 11 draft alpha-more - 1c5cfd894796
1208 | o 11 draft alpha-more - 1c5cfd894796
1209 | |
1209 | |
1210 o | 10 draft A-secret - 435b5d83910c
1210 o | 10 draft A-secret - 435b5d83910c
1211 |/
1211 |/
1212 o 9 public a-H - 967b449fbc94
1212 o 9 public a-H - 967b449fbc94
1213 |
1213 |
1214 | o 8 public a-F - b740e3e5c05d
1214 | o 8 public a-F - b740e3e5c05d
1215 | |
1215 | |
1216 | o 7 public a-E - e9f537e46dea
1216 | o 7 public a-E - e9f537e46dea
1217 | |
1217 | |
1218 +---o 6 public n-B - 145e75495359
1218 +---o 6 public n-B - 145e75495359
1219 | |
1219 | |
1220 o | 5 public n-A - d6bcb4f74035
1220 o | 5 public n-A - d6bcb4f74035
1221 | |
1221 | |
1222 | o 4 public a-D - b555f63b6063
1222 | o 4 public a-D - b555f63b6063
1223 | |
1223 | |
1224 | o 3 public a-C - 54acac6f23ab
1224 | o 3 public a-C - 54acac6f23ab
1225 | |
1225 | |
1226 o | 2 public b-A - f54f1bb90ff3
1226 o | 2 public b-A - f54f1bb90ff3
1227 |/
1227 |/
1228 o 1 public a-B - 548a3d25dbf0
1228 o 1 public a-B - 548a3d25dbf0
1229 |
1229 |
1230 o 0 public a-A - 054250a37db4
1230 o 0 public a-A - 054250a37db4
1231
1231
1232
1232
1233 2. cloning publishing repository
1233 2. cloning publishing repository
1234
1234
1235 (everything should be public)
1235 (everything should be public)
1236
1236
1237 $ hg clone -U alpha Upsilon
1237 $ hg clone -U alpha Upsilon
1238 $ hgph -R Upsilon
1238 $ hgph -R Upsilon
1239 o 13 public mu-more - 5237fb433fc8
1239 o 13 public mu-more - 5237fb433fc8
1240 |
1240 |
1241 | o 12 public alpha-more - 1c5cfd894796
1241 | o 12 public alpha-more - 1c5cfd894796
1242 | |
1242 | |
1243 o | 11 public A-secret - 435b5d83910c
1243 o | 11 public A-secret - 435b5d83910c
1244 |/
1244 |/
1245 o 10 public a-H - 967b449fbc94
1245 o 10 public a-H - 967b449fbc94
1246 |
1246 |
1247 | o 9 public a-G - 3e27b6f1eee1
1247 | o 9 public a-G - 3e27b6f1eee1
1248 | |
1248 | |
1249 | o 8 public a-F - b740e3e5c05d
1249 | o 8 public a-F - b740e3e5c05d
1250 | |
1250 | |
1251 | o 7 public a-E - e9f537e46dea
1251 | o 7 public a-E - e9f537e46dea
1252 | |
1252 | |
1253 +---o 6 public n-B - 145e75495359
1253 +---o 6 public n-B - 145e75495359
1254 | |
1254 | |
1255 o | 5 public n-A - d6bcb4f74035
1255 o | 5 public n-A - d6bcb4f74035
1256 | |
1256 | |
1257 o | 4 public b-A - f54f1bb90ff3
1257 o | 4 public b-A - f54f1bb90ff3
1258 | |
1258 | |
1259 | o 3 public a-D - b555f63b6063
1259 | o 3 public a-D - b555f63b6063
1260 | |
1260 | |
1261 | o 2 public a-C - 54acac6f23ab
1261 | o 2 public a-C - 54acac6f23ab
1262 |/
1262 |/
1263 o 1 public a-B - 548a3d25dbf0
1263 o 1 public a-B - 548a3d25dbf0
1264 |
1264 |
1265 o 0 public a-A - 054250a37db4
1265 o 0 public a-A - 054250a37db4
1266
1266
1267 #if unix-permissions no-root
1267 #if unix-permissions no-root
1268
1268
1269 Pushing From an unlockable repo
1269 Pushing From an unlockable repo
1270 --------------------------------
1270 --------------------------------
1271 (issue3684)
1271 (issue3684)
1272
1272
1273 Unability to lock the source repo should not prevent the push. It will prevent
1273 Unability to lock the source repo should not prevent the push. It will prevent
1274 the retrieval of remote phase during push. For example, pushing to a publishing
1274 the retrieval of remote phase during push. For example, pushing to a publishing
1275 server won't turn changeset public.
1275 server won't turn changeset public.
1276
1276
1277 1. Test that push is not prevented
1277 1. Test that push is not prevented
1278
1278
1279 $ hg init Phi
1279 $ hg init Phi
1280 $ cd Upsilon
1280 $ cd Upsilon
1281 $ chmod -R -w .hg
1281 $ chmod -R -w .hg
1282 $ hg push ../Phi
1282 $ hg push ../Phi
1283 pushing to ../Phi
1283 pushing to ../Phi
1284 searching for changes
1284 searching for changes
1285 adding changesets
1285 adding changesets
1286 adding manifests
1286 adding manifests
1287 adding file changes
1287 adding file changes
1288 added 14 changesets with 14 changes to 14 files (+3 heads)
1288 added 14 changesets with 14 changes to 14 files (+3 heads)
1289 test-debug-phase: new rev 0: x -> 0
1289 test-debug-phase: new rev 0: x -> 0
1290 test-debug-phase: new rev 1: x -> 0
1290 test-debug-phase: new rev 1: x -> 0
1291 test-debug-phase: new rev 2: x -> 0
1291 test-debug-phase: new rev 2: x -> 0
1292 test-debug-phase: new rev 3: x -> 0
1292 test-debug-phase: new rev 3: x -> 0
1293 test-debug-phase: new rev 4: x -> 0
1293 test-debug-phase: new rev 4: x -> 0
1294 test-debug-phase: new rev 5: x -> 0
1294 test-debug-phase: new rev 5: x -> 0
1295 test-debug-phase: new rev 6: x -> 0
1295 test-debug-phase: new rev 6: x -> 0
1296 test-debug-phase: new rev 7: x -> 0
1296 test-debug-phase: new rev 7: x -> 0
1297 test-debug-phase: new rev 8: x -> 0
1297 test-debug-phase: new rev 8: x -> 0
1298 test-debug-phase: new rev 9: x -> 0
1298 test-debug-phase: new rev 9: x -> 0
1299 test-debug-phase: new rev 10: x -> 0
1299 test-debug-phase: new rev 10: x -> 0
1300 test-debug-phase: new rev 11: x -> 0
1300 test-debug-phase: new rev 11: x -> 0
1301 test-debug-phase: new rev 12: x -> 0
1301 test-debug-phase: new rev 12: x -> 0
1302 test-debug-phase: new rev 13: x -> 0
1302 test-debug-phase: new rev 13: x -> 0
1303 $ chmod -R +w .hg
1303 $ chmod -R +w .hg
1304
1304
1305 2. Test that failed phases movement are reported
1305 2. Test that failed phases movement are reported
1306
1306
1307 $ hg phase --force --draft 3
1307 $ hg phase --force --draft 3
1308 test-debug-phase: move rev 3: 0 -> 1
1308 test-debug-phase: move rev 3: 0 -> 1
1309 test-debug-phase: move rev 7: 0 -> 1
1309 test-debug-phase: move rev 7: 0 -> 1
1310 test-debug-phase: move rev 8: 0 -> 1
1310 test-debug-phase: move rev 8: 0 -> 1
1311 test-debug-phase: move rev 9: 0 -> 1
1311 test-debug-phase: move rev 9: 0 -> 1
1312 $ chmod -R -w .hg
1312 $ chmod -R -w .hg
1313 $ hg push ../Phi
1313 $ hg push ../Phi
1314 pushing to ../Phi
1314 pushing to ../Phi
1315 searching for changes
1315 searching for changes
1316 no changes found
1316 no changes found
1317 cannot lock source repo, skipping local public phase update
1317 cannot lock source repo, skipping local public phase update
1318 [1]
1318 [1]
1319 $ chmod -R +w .hg
1319 $ chmod -R +w .hg
1320
1320
1321 3. Test that push is prevented if lock was already acquired (not a permission
1321 3. Test that push is prevented if lock was already acquired (not a permission
1322 error, but EEXIST)
1322 error, but EEXIST)
1323
1323
1324 $ touch .hg/store/lock
1324 $ touch .hg/store/lock
1325 $ hg push ../Phi --config ui.timeout=1 --config ui.timeout.warn=0
1325 $ hg push ../Phi --config ui.timeout=1 --config ui.timeout.warn=0
1326 pushing to ../Phi
1326 pushing to ../Phi
1327 waiting for lock on repository $TESTTMP/Upsilon held by ''
1327 waiting for lock on repository $TESTTMP/Upsilon held by ''
1328 abort: repository $TESTTMP/Upsilon: timed out waiting for lock held by ''
1328 abort: repository $TESTTMP/Upsilon: timed out waiting for lock held by ''
1329 (lock might be very busy)
1329 (lock might be very busy)
1330 [20]
1330 [20]
1331 $ rm .hg/store/lock
1331 $ rm .hg/store/lock
1332
1332
1333 $ cd ..
1333 $ cd ..
1334
1334
1335 #endif
1335 #endif
1336
1336
1337 Test that clone behaves like pull and doesn't publish changesets as plain push
1337 Test that clone behaves like pull and doesn't publish changesets as plain push
1338 does. The conditional output accounts for changes in the conditional block
1338 does. The conditional output accounts for changes in the conditional block
1339 above.
1339 above.
1340
1340
1341 #if unix-permissions no-root
1341 #if unix-permissions no-root
1342 $ hg -R Upsilon phase -q --force --draft 2
1342 $ hg -R Upsilon phase -q --force --draft 2
1343 test-debug-phase: move rev 2: 0 -> 1
1343 test-debug-phase: move rev 2: 0 -> 1
1344 #else
1344 #else
1345 $ hg -R Upsilon phase -q --force --draft 2
1345 $ hg -R Upsilon phase -q --force --draft 2
1346 test-debug-phase: move rev 2: 0 -> 1
1346 test-debug-phase: move rev 2: 0 -> 1
1347 test-debug-phase: move rev 3: 0 -> 1
1347 test-debug-phase: move rev 3: 0 -> 1
1348 test-debug-phase: move rev 7: 0 -> 1
1348 test-debug-phase: move rev 7: 0 -> 1
1349 test-debug-phase: move rev 8: 0 -> 1
1349 test-debug-phase: move rev 8: 0 -> 1
1350 test-debug-phase: move rev 9: 0 -> 1
1350 test-debug-phase: move rev 9: 0 -> 1
1351 #endif
1351 #endif
1352
1352
1353 $ hg clone -q Upsilon Pi -r 7
1353 $ hg clone -q Upsilon Pi -r 7
1354 test-debug-phase: new rev 0: x -> 0
1354 test-debug-phase: new rev 0: x -> 0
1355 test-debug-phase: new rev 1: x -> 0
1355 test-debug-phase: new rev 1: x -> 0
1356 test-debug-phase: new rev 2: x -> 0
1356 test-debug-phase: new rev 2: x -> 0
1357 test-debug-phase: new rev 3: x -> 0
1357 test-debug-phase: new rev 3: x -> 0
1358 test-debug-phase: new rev 4: x -> 0
1358 test-debug-phase: new rev 4: x -> 0
1359 $ hgph Upsilon -r 'min(draft())'
1359 $ hgph Upsilon -r 'min(draft())'
1360 o 2 draft a-C - 54acac6f23ab
1360 o 2 draft a-C - 54acac6f23ab
1361 |
1361 |
1362 ~
1362 ~
1363
1363
1364 $ hg -R Upsilon push Pi -r 7
1364 $ hg -R Upsilon push Pi -r 7
1365 pushing to Pi
1365 pushing to Pi
1366 searching for changes
1366 searching for changes
1367 no changes found
1367 no changes found
1368 test-debug-phase: move rev 2: 1 -> 0
1368 test-debug-phase: move rev 2: 1 -> 0
1369 test-debug-phase: move rev 3: 1 -> 0
1369 test-debug-phase: move rev 3: 1 -> 0
1370 test-debug-phase: move rev 7: 1 -> 0
1370 test-debug-phase: move rev 7: 1 -> 0
1371 [1]
1371 [1]
1372 $ hgph Upsilon -r 'min(draft())'
1372 $ hgph Upsilon -r 'min(draft())'
1373 o 8 draft a-F - b740e3e5c05d
1373 o 8 draft a-F - b740e3e5c05d
1374 |
1374 |
1375 ~
1375 ~
1376
1376
1377 $ hg -R Upsilon push Pi -r 8
1377 $ hg -R Upsilon push Pi -r 8
1378 pushing to Pi
1378 pushing to Pi
1379 searching for changes
1379 searching for changes
1380 adding changesets
1380 adding changesets
1381 adding manifests
1381 adding manifests
1382 adding file changes
1382 adding file changes
1383 added 1 changesets with 1 changes to 1 files
1383 added 1 changesets with 1 changes to 1 files
1384 test-debug-phase: new rev 5: x -> 0
1384 test-debug-phase: new rev 5: x -> 0
1385 test-debug-phase: move rev 8: 1 -> 0
1385 test-debug-phase: move rev 8: 1 -> 0
1386
1386
1387 $ hgph Upsilon -r 'min(draft())'
1387 $ hgph Upsilon -r 'min(draft())'
1388 o 9 draft a-G - 3e27b6f1eee1
1388 o 9 draft a-G - 3e27b6f1eee1
1389 |
1389 |
1390 ~
1390 ~
1391
1391
1392 Test phases exchange when a phaseroot is on a merge
1392 Test phases exchange when a phaseroot is on a merge
1393
1393
1394 $ hg init mergetest
1394 $ hg init mergetest
1395 $ cd mergetest
1395 $ cd mergetest
1396 > cat > .hg/hgrc << EOF
1396 > cat > .hg/hgrc << EOF
1397 > [phases]
1397 > [phases]
1398 > publish = false
1398 > publish = false
1399 > EOF
1399 > EOF
1400
1400
1401 $ hg debugdrawdag << EOF
1401 $ hg debugdrawdag << EOF
1402 > E Z
1402 > E Z
1403 > |\|
1403 > |\|
1404 > D Y
1404 > D Y
1405 > | |
1405 > | |
1406 > C X
1406 > C X
1407 > |/
1407 > |/
1408 > B
1408 > B
1409 > |
1409 > |
1410 > A
1410 > A
1411 > EOF
1411 > EOF
1412 test-debug-phase: new rev 0: x -> 1
1412 test-debug-phase: new rev 0: x -> 1
1413 test-debug-phase: new rev 1: x -> 1
1413 test-debug-phase: new rev 1: x -> 1
1414 test-debug-phase: new rev 2: x -> 1
1414 test-debug-phase: new rev 2: x -> 1
1415 test-debug-phase: new rev 3: x -> 1
1415 test-debug-phase: new rev 3: x -> 1
1416 test-debug-phase: new rev 4: x -> 1
1416 test-debug-phase: new rev 4: x -> 1
1417 test-debug-phase: new rev 5: x -> 1
1417 test-debug-phase: new rev 5: x -> 1
1418 test-debug-phase: new rev 6: x -> 1
1418 test-debug-phase: new rev 6: x -> 1
1419 test-debug-phase: new rev 7: x -> 1
1419 test-debug-phase: new rev 7: x -> 1
1420
1420
1421 $ hg phase --public -r D
1421 $ hg phase --public -r D
1422 test-debug-phase: move rev 0: 1 -> 0
1422 test-debug-phase: move rev 0: 1 -> 0
1423 test-debug-phase: move rev 1: 1 -> 0
1423 test-debug-phase: move rev 1: 1 -> 0
1424 test-debug-phase: move rev 2: 1 -> 0
1424 test-debug-phase: move rev 2: 1 -> 0
1425 test-debug-phase: move rev 4: 1 -> 0
1425 test-debug-phase: move rev 4: 1 -> 0
1426
1426
1427 $ hg log -G -T '{shortest(node, 5)} {phase}'
1427 $ hg log -G -T '{shortest(node, 5)} {phase}'
1428 o bb947 draft
1428 o bb947 draft
1429 |
1429 |
1430 | o 5ac28 draft
1430 | o 5ac28 draft
1431 |/|
1431 |/|
1432 o | 13b7b draft
1432 o | 13b7b draft
1433 | |
1433 | |
1434 | o f5853 public
1434 | o f5853 public
1435 | |
1435 | |
1436 o | c67c4 draft
1436 o | c67c4 draft
1437 | |
1437 | |
1438 | o 26805 public
1438 | o 26805 public
1439 |/
1439 |/
1440 o 11247 public
1440 o 11247 public
1441 |
1441 |
1442 o 426ba public
1442 o 426ba public
1443
1443
1444 $ cd ..
1444 $ cd ..
1445
1445
1446 Works with default settings
1446 Works with default settings
1447
1447
1448 $ hg -R mergetest serve -p $HGPORT -d --pid-file=hg.pid
1448 $ hg -R mergetest serve -p $HGPORT -d --pid-file=hg.pid
1449 $ cat hg.pid >> $DAEMON_PIDS
1449 $ cat hg.pid >> $DAEMON_PIDS
1450
1450
1451 $ hg clone -U http://localhost:$HGPORT mergetest-normal
1451 $ hg clone -U http://localhost:$HGPORT mergetest-normal
1452 requesting all changes
1452 requesting all changes
1453 adding changesets
1453 adding changesets
1454 adding manifests
1454 adding manifests
1455 adding file changes
1455 adding file changes
1456 added 8 changesets with 7 changes to 7 files (+1 heads)
1456 added 8 changesets with 7 changes to 7 files (+1 heads)
1457 new changesets 426bada5c675:bb94757e651a (4 drafts)
1457 new changesets 426bada5c675:bb94757e651a (4 drafts)
1458 test-debug-phase: new rev 0: x -> 0
1458 test-debug-phase: new rev 0: x -> 0
1459 test-debug-phase: new rev 1: x -> 0
1459 test-debug-phase: new rev 1: x -> 0
1460 test-debug-phase: new rev 2: x -> 0
1460 test-debug-phase: new rev 2: x -> 0
1461 test-debug-phase: new rev 3: x -> 1
1461 test-debug-phase: new rev 3: x -> 1
1462 test-debug-phase: new rev 4: x -> 0
1462 test-debug-phase: new rev 4: x -> 0
1463 test-debug-phase: new rev 5: x -> 1
1463 test-debug-phase: new rev 5: x -> 1
1464 test-debug-phase: new rev 6: x -> 1
1464 test-debug-phase: new rev 6: x -> 1
1465 test-debug-phase: new rev 7: x -> 1
1465 test-debug-phase: new rev 7: x -> 1
1466
1466
1467 $ hg -R mergetest-normal log -G -T '{shortest(node, 5)} {phase}'
1467 $ hg -R mergetest-normal log -G -T '{shortest(node, 5)} {phase}'
1468 o bb947 draft
1468 o bb947 draft
1469 |
1469 |
1470 | o 5ac28 draft
1470 | o 5ac28 draft
1471 |/|
1471 |/|
1472 o | 13b7b draft
1472 o | 13b7b draft
1473 | |
1473 | |
1474 | o f5853 public
1474 | o f5853 public
1475 | |
1475 | |
1476 o | c67c4 draft
1476 o | c67c4 draft
1477 | |
1477 | |
1478 | o 26805 public
1478 | o 26805 public
1479 |/
1479 |/
1480 o 11247 public
1480 o 11247 public
1481 |
1481 |
1482 o 426ba public
1482 o 426ba public
1483
1483
1484 $ killdaemons.py
1484 $ killdaemons.py
1485
1485
1486 With legacy listkeys over bundle2
1486 With legacy listkeys over bundle2
1487 (issue 5939: public phase was lost on 26805 and f5853 before, due to a bug
1487 (issue 5939: public phase was lost on 26805 and f5853 before, due to a bug
1488 of phase heads computation)
1488 of phase heads computation)
1489
1489
1490 $ hg -R mergetest --config devel.legacy.exchange=phases serve -p $HGPORT -d --pid-file=hg.pid
1490 $ hg -R mergetest --config devel.legacy.exchange=phases serve -p $HGPORT -d --pid-file=hg.pid
1491 $ cat hg.pid >> $DAEMON_PIDS
1491 $ cat hg.pid >> $DAEMON_PIDS
1492
1492
1493 $ hg clone -U http://localhost:$HGPORT mergetest-nobinarypart
1493 $ hg clone -U http://localhost:$HGPORT mergetest-nobinarypart
1494 requesting all changes
1494 requesting all changes
1495 adding changesets
1495 adding changesets
1496 adding manifests
1496 adding manifests
1497 adding file changes
1497 adding file changes
1498 added 8 changesets with 7 changes to 7 files (+1 heads)
1498 added 8 changesets with 7 changes to 7 files (+1 heads)
1499 new changesets 426bada5c675:bb94757e651a (4 drafts)
1499 new changesets 426bada5c675:bb94757e651a (4 drafts)
1500 test-debug-phase: new rev 0: x -> 0
1500 test-debug-phase: new rev 0: x -> 0
1501 test-debug-phase: new rev 1: x -> 0
1501 test-debug-phase: new rev 1: x -> 0
1502 test-debug-phase: new rev 2: x -> 0
1502 test-debug-phase: new rev 2: x -> 0
1503 test-debug-phase: new rev 3: x -> 1
1503 test-debug-phase: new rev 3: x -> 1
1504 test-debug-phase: new rev 4: x -> 0
1504 test-debug-phase: new rev 4: x -> 0
1505 test-debug-phase: new rev 5: x -> 1
1505 test-debug-phase: new rev 5: x -> 1
1506 test-debug-phase: new rev 6: x -> 1
1506 test-debug-phase: new rev 6: x -> 1
1507 test-debug-phase: new rev 7: x -> 1
1507 test-debug-phase: new rev 7: x -> 1
1508
1508
1509 $ hg -R mergetest-nobinarypart log -G -T '{shortest(node, 5)} {phase}'
1509 $ hg -R mergetest-nobinarypart log -G -T '{shortest(node, 5)} {phase}'
1510 o bb947 draft
1510 o bb947 draft
1511 |
1511 |
1512 | o 5ac28 draft
1512 | o 5ac28 draft
1513 |/|
1513 |/|
1514 o | 13b7b draft
1514 o | 13b7b draft
1515 | |
1515 | |
1516 | o f5853 public
1516 | o f5853 public
1517 | |
1517 | |
1518 o | c67c4 draft
1518 o | c67c4 draft
1519 | |
1519 | |
1520 | o 26805 public
1520 | o 26805 public
1521 |/
1521 |/
1522 o 11247 public
1522 o 11247 public
1523 |
1523 |
1524 o 426ba public
1524 o 426ba public
1525
1525
1526 $ killdaemons.py
1526 $ killdaemons.py
1527
1527
1528 Without bundle2
1528 Without bundle2
1529 (issue 5939: public phase was lost on 26805 and f5853 before, due to a bug
1529 (issue 5939: public phase was lost on 26805 and f5853 before, due to a bug
1530 of phase heads computation)
1530 of phase heads computation)
1531
1531
1532 $ hg -R mergetest serve -p $HGPORT -d --pid-file=hg.pid
1532 $ hg -R mergetest serve -p $HGPORT -d --pid-file=hg.pid
1533 $ cat hg.pid >> $DAEMON_PIDS
1533 $ cat hg.pid >> $DAEMON_PIDS
1534
1534
1535 $ hg --config devel.legacy.exchange=bundle1 clone -U http://localhost:$HGPORT mergetest-bundle1
1535 $ hg --config devel.legacy.exchange=bundle1 clone -U http://localhost:$HGPORT mergetest-bundle1
1536 requesting all changes
1536 requesting all changes
1537 adding changesets
1537 adding changesets
1538 adding manifests
1538 adding manifests
1539 adding file changes
1539 adding file changes
1540 added 8 changesets with 7 changes to 7 files (+1 heads)
1540 added 8 changesets with 7 changes to 7 files (+1 heads)
1541 new changesets 426bada5c675:bb94757e651a (4 drafts)
1541 new changesets 426bada5c675:bb94757e651a (4 drafts)
1542 test-debug-phase: new rev 0: x -> 0
1542 test-debug-phase: new rev 0: x -> 0
1543 test-debug-phase: new rev 1: x -> 0
1543 test-debug-phase: new rev 1: x -> 0
1544 test-debug-phase: new rev 2: x -> 0
1544 test-debug-phase: new rev 2: x -> 0
1545 test-debug-phase: new rev 3: x -> 1
1545 test-debug-phase: new rev 3: x -> 1
1546 test-debug-phase: new rev 4: x -> 0
1546 test-debug-phase: new rev 4: x -> 0
1547 test-debug-phase: new rev 5: x -> 1
1547 test-debug-phase: new rev 5: x -> 1
1548 test-debug-phase: new rev 6: x -> 1
1548 test-debug-phase: new rev 6: x -> 1
1549 test-debug-phase: new rev 7: x -> 1
1549 test-debug-phase: new rev 7: x -> 1
1550
1550
1551 $ hg -R mergetest-bundle1 log -G -T '{shortest(node, 5)} {phase}'
1551 $ hg -R mergetest-bundle1 log -G -T '{shortest(node, 5)} {phase}'
1552 o bb947 draft
1552 o bb947 draft
1553 |
1553 |
1554 | o 5ac28 draft
1554 | o 5ac28 draft
1555 |/|
1555 |/|
1556 o | 13b7b draft
1556 o | 13b7b draft
1557 | |
1557 | |
1558 | o f5853 public
1558 | o f5853 public
1559 | |
1559 | |
1560 o | c67c4 draft
1560 o | c67c4 draft
1561 | |
1561 | |
1562 | o 26805 public
1562 | o 26805 public
1563 |/
1563 |/
1564 o 11247 public
1564 o 11247 public
1565 |
1565 |
1566 o 426ba public
1566 o 426ba public
1567
1567
1568 $ killdaemons.py
1568 $ killdaemons.py
1569
1569
1570
1570
1571 auto-publish config
1571 auto-publish config
1572 -------------------
1572 -------------------
1573
1573
1574 $ hg init auto-publish-orig
1574 $ hg init auto-publish-orig
1575 $ hg clone -q auto-publish-orig auto-publish-clone
1575 $ hg clone -q auto-publish-orig auto-publish-clone
1576 $ cd auto-publish-clone
1576 $ cd auto-publish-clone
1577 $ mkcommit a-p-A
1577 $ mkcommit a-p-A
1578 test-debug-phase: new rev 0: x -> 1
1578 test-debug-phase: new rev 0: x -> 1
1579 $ mkcommit a-p-B
1579 $ mkcommit a-p-B
1580 test-debug-phase: new rev 1: x -> 1
1580 test-debug-phase: new rev 1: x -> 1
1581
1581
1582 abort behavior
1582 abort behavior
1583
1583
1584 $ hg push --config experimental.auto-publish=abort
1584 $ hg push --config experimental.auto-publish=abort
1585 pushing to $TESTTMP/auto-publish-orig
1585 pushing to $TESTTMP/auto-publish-orig
1586 abort: push would publish 2 changesets
1586 abort: push would publish 2 changesets
1587 (use --publish or adjust 'experimental.auto-publish' config)
1587 (use --publish or adjust 'experimental.auto-publish' config)
1588 [255]
1588 [255]
1589 $ hg push -r '.^' --config experimental.auto-publish=abort
1589 $ hg push -r '.^' --config experimental.auto-publish=abort
1590 pushing to $TESTTMP/auto-publish-orig
1590 pushing to $TESTTMP/auto-publish-orig
1591 abort: push would publish 1 changesets
1591 abort: push would publish 1 changesets
1592 (use --publish or adjust 'experimental.auto-publish' config)
1592 (use --publish or adjust 'experimental.auto-publish' config)
1593 [255]
1593 [255]
1594
1594
1595 trying to push a secret changeset doesn't confuse auto-publish
1596
1597 $ hg phase --secret --force
1598 test-debug-phase: move rev 0: 1 -> 2
1599 test-debug-phase: move rev 1: 1 -> 2
1600
1601 $ hg push --config experimental.auto-publish=abort
1602 pushing to $TESTTMP/auto-publish-orig
1603 abort: push would publish 1 changesets
1604 (use --publish or adjust 'experimental.auto-publish' config)
1605 [255]
1606 $ hg push -r . --config experimental.auto-publish=abort
1607 pushing to $TESTTMP/auto-publish-orig
1608 abort: push would publish 1 changesets
1609 (use --publish or adjust 'experimental.auto-publish' config)
1610 [255]
1611
1612 $ hg phase --draft
1613 test-debug-phase: move rev 1: 2 -> 1
1614
1595 --publish flag makes push succeed
1615 --publish flag makes push succeed
1596
1616
1597 $ hg push -r '.^' --publish --config experimental.auto-publish=abort
1617 $ hg push -r '.^' --publish --config experimental.auto-publish=abort
1598 pushing to $TESTTMP/auto-publish-orig
1618 pushing to $TESTTMP/auto-publish-orig
1599 searching for changes
1619 searching for changes
1600 adding changesets
1620 adding changesets
1601 adding manifests
1621 adding manifests
1602 adding file changes
1622 adding file changes
1603 added 1 changesets with 1 changes to 1 files
1623 added 1 changesets with 1 changes to 1 files
1604 test-debug-phase: new rev 0: x -> 0
1624 test-debug-phase: new rev 0: x -> 0
1605 test-debug-phase: move rev 0: 1 -> 0
1625 test-debug-phase: move rev 0: 1 -> 0
1606
1626
1607 warn behavior
1627 warn behavior
1608
1628
1609 $ hg push --config experimental.auto-publish=warn
1629 $ hg push --config experimental.auto-publish=warn
1610 pushing to $TESTTMP/auto-publish-orig
1630 pushing to $TESTTMP/auto-publish-orig
1611 1 changesets about to be published
1631 1 changesets about to be published
1612 searching for changes
1632 searching for changes
1613 adding changesets
1633 adding changesets
1614 adding manifests
1634 adding manifests
1615 adding file changes
1635 adding file changes
1616 added 1 changesets with 1 changes to 1 files
1636 added 1 changesets with 1 changes to 1 files
1617 test-debug-phase: new rev 1: x -> 0
1637 test-debug-phase: new rev 1: x -> 0
1618 test-debug-phase: move rev 1: 1 -> 0
1638 test-debug-phase: move rev 1: 1 -> 0
1619
1639
1620 confirm behavior
1640 confirm behavior
1621
1641
1622 $ mkcommit a-p-C
1642 $ mkcommit a-p-C
1623 test-debug-phase: new rev 2: x -> 1
1643 test-debug-phase: new rev 2: x -> 1
1624 $ hg push --config experimental.auto-publish=confirm
1644 $ hg push --config experimental.auto-publish=confirm
1625 pushing to $TESTTMP/auto-publish-orig
1645 pushing to $TESTTMP/auto-publish-orig
1626 push and publish 1 changesets (yn)? y
1646 push and publish 1 changesets (yn)? y
1627 searching for changes
1647 searching for changes
1628 adding changesets
1648 adding changesets
1629 adding manifests
1649 adding manifests
1630 adding file changes
1650 adding file changes
1631 added 1 changesets with 1 changes to 1 files
1651 added 1 changesets with 1 changes to 1 files
1632 test-debug-phase: new rev 2: x -> 0
1652 test-debug-phase: new rev 2: x -> 0
1633 test-debug-phase: move rev 2: 1 -> 0
1653 test-debug-phase: move rev 2: 1 -> 0
1634
1654
1635 $ cd ..
1655 $ cd ..
1636
1656
1637
1657
1638 --publish flag
1658 --publish flag
1639 --------------
1659 --------------
1640
1660
1641 $ hg init doesnt-publish
1661 $ hg init doesnt-publish
1642 $ cd doesnt-publish
1662 $ cd doesnt-publish
1643 $ cat > .hg/hgrc << EOF
1663 $ cat > .hg/hgrc << EOF
1644 > [phases]
1664 > [phases]
1645 > publish=0
1665 > publish=0
1646 > EOF
1666 > EOF
1647 $ mkcommit orig-root
1667 $ mkcommit orig-root
1648 test-debug-phase: new rev 0: x -> 1
1668 test-debug-phase: new rev 0: x -> 1
1649 $ hg phase --public -r 'all()'
1669 $ hg phase --public -r 'all()'
1650 test-debug-phase: move rev 0: 1 -> 0
1670 test-debug-phase: move rev 0: 1 -> 0
1651 $ cd ..
1671 $ cd ..
1652
1672
1653 $ hg clone -q doesnt-publish client
1673 $ hg clone -q doesnt-publish client
1654 $ cd client
1674 $ cd client
1655
1675
1656 pushing nothing
1676 pushing nothing
1657
1677
1658 $ mkcommit new-A
1678 $ mkcommit new-A
1659 test-debug-phase: new rev 1: x -> 1
1679 test-debug-phase: new rev 1: x -> 1
1660 $ mkcommit new-B
1680 $ mkcommit new-B
1661 test-debug-phase: new rev 2: x -> 1
1681 test-debug-phase: new rev 2: x -> 1
1662 $ hg push --publish -r null
1682 $ hg push --publish -r null
1663 pushing to $TESTTMP/doesnt-publish
1683 pushing to $TESTTMP/doesnt-publish
1664 searching for changes
1684 searching for changes
1665 no changes found
1685 no changes found
1666 [1]
1686 [1]
1667 $ hgph
1687 $ hgph
1668 @ 2 draft new-B - 89512e87d697
1688 @ 2 draft new-B - 89512e87d697
1669 |
1689 |
1670 o 1 draft new-A - 4826e44e690e
1690 o 1 draft new-A - 4826e44e690e
1671 |
1691 |
1672 o 0 public orig-root - c48edaf99a10
1692 o 0 public orig-root - c48edaf99a10
1673
1693
1674
1694
1675 pushing a new changeset (selective)
1695 pushing a new changeset (selective)
1676
1696
1677 $ hg push --publish -r 'desc("new-A")'
1697 $ hg push --publish -r 'desc("new-A")'
1678 pushing to $TESTTMP/doesnt-publish
1698 pushing to $TESTTMP/doesnt-publish
1679 searching for changes
1699 searching for changes
1680 adding changesets
1700 adding changesets
1681 adding manifests
1701 adding manifests
1682 adding file changes
1702 adding file changes
1683 added 1 changesets with 1 changes to 1 files
1703 added 1 changesets with 1 changes to 1 files
1684 test-debug-phase: new rev 1: x -> 0
1704 test-debug-phase: new rev 1: x -> 0
1685 test-debug-phase: move rev 1: 1 -> 0
1705 test-debug-phase: move rev 1: 1 -> 0
1686 $ hgph
1706 $ hgph
1687 @ 2 draft new-B - 89512e87d697
1707 @ 2 draft new-B - 89512e87d697
1688 |
1708 |
1689 o 1 public new-A - 4826e44e690e
1709 o 1 public new-A - 4826e44e690e
1690 |
1710 |
1691 o 0 public orig-root - c48edaf99a10
1711 o 0 public orig-root - c48edaf99a10
1692
1712
1693
1713
1694 pushing a new changeset (linear)
1714 pushing a new changeset (linear)
1695
1715
1696 $ hg push --publish
1716 $ hg push --publish
1697 pushing to $TESTTMP/doesnt-publish
1717 pushing to $TESTTMP/doesnt-publish
1698 searching for changes
1718 searching for changes
1699 adding changesets
1719 adding changesets
1700 adding manifests
1720 adding manifests
1701 adding file changes
1721 adding file changes
1702 added 1 changesets with 1 changes to 1 files
1722 added 1 changesets with 1 changes to 1 files
1703 test-debug-phase: new rev 2: x -> 0
1723 test-debug-phase: new rev 2: x -> 0
1704 test-debug-phase: move rev 2: 1 -> 0
1724 test-debug-phase: move rev 2: 1 -> 0
1705 $ hgph
1725 $ hgph
1706 @ 2 public new-B - 89512e87d697
1726 @ 2 public new-B - 89512e87d697
1707 |
1727 |
1708 o 1 public new-A - 4826e44e690e
1728 o 1 public new-A - 4826e44e690e
1709 |
1729 |
1710 o 0 public orig-root - c48edaf99a10
1730 o 0 public orig-root - c48edaf99a10
1711
1731
1712
1732
1713 pushing new changesets (different branches)
1733 pushing new changesets (different branches)
1714
1734
1715 $ mkcommit new-C
1735 $ mkcommit new-C
1716 test-debug-phase: new rev 3: x -> 1
1736 test-debug-phase: new rev 3: x -> 1
1717 $ hg update -q '.^'
1737 $ hg update -q '.^'
1718 $ hg branch -q another
1738 $ hg branch -q another
1719 $ mkcommit new-D
1739 $ mkcommit new-D
1720 test-debug-phase: new rev 4: x -> 1
1740 test-debug-phase: new rev 4: x -> 1
1721 $ hg push --new-branch --publish
1741 $ hg push --new-branch --publish
1722 pushing to $TESTTMP/doesnt-publish
1742 pushing to $TESTTMP/doesnt-publish
1723 searching for changes
1743 searching for changes
1724 adding changesets
1744 adding changesets
1725 adding manifests
1745 adding manifests
1726 adding file changes
1746 adding file changes
1727 added 2 changesets with 2 changes to 2 files (+1 heads)
1747 added 2 changesets with 2 changes to 2 files (+1 heads)
1728 test-debug-phase: new rev 3: x -> 0
1748 test-debug-phase: new rev 3: x -> 0
1729 test-debug-phase: new rev 4: x -> 0
1749 test-debug-phase: new rev 4: x -> 0
1730 test-debug-phase: move rev 3: 1 -> 0
1750 test-debug-phase: move rev 3: 1 -> 0
1731 test-debug-phase: move rev 4: 1 -> 0
1751 test-debug-phase: move rev 4: 1 -> 0
1732 $ hgph
1752 $ hgph
1733 @ 4 public new-D - 5e53dcafd13c
1753 @ 4 public new-D - 5e53dcafd13c
1734 |
1754 |
1735 | o 3 public new-C - 1665482cc06d
1755 | o 3 public new-C - 1665482cc06d
1736 |/
1756 |/
1737 o 2 public new-B - 89512e87d697
1757 o 2 public new-B - 89512e87d697
1738 |
1758 |
1739 o 1 public new-A - 4826e44e690e
1759 o 1 public new-A - 4826e44e690e
1740 |
1760 |
1741 o 0 public orig-root - c48edaf99a10
1761 o 0 public orig-root - c48edaf99a10
1742
1762
1743
1763
1744 pushing a shared changeset
1764 pushing a shared changeset
1745
1765
1746 $ mkcommit new-E
1766 $ mkcommit new-E
1747 test-debug-phase: new rev 5: x -> 1
1767 test-debug-phase: new rev 5: x -> 1
1748 $ hg push
1768 $ hg push
1749 pushing to $TESTTMP/doesnt-publish
1769 pushing to $TESTTMP/doesnt-publish
1750 searching for changes
1770 searching for changes
1751 adding changesets
1771 adding changesets
1752 adding manifests
1772 adding manifests
1753 adding file changes
1773 adding file changes
1754 added 1 changesets with 1 changes to 1 files
1774 added 1 changesets with 1 changes to 1 files
1755 test-debug-phase: new rev 5: x -> 1
1775 test-debug-phase: new rev 5: x -> 1
1756 $ hg push --publish
1776 $ hg push --publish
1757 pushing to $TESTTMP/doesnt-publish
1777 pushing to $TESTTMP/doesnt-publish
1758 searching for changes
1778 searching for changes
1759 no changes found
1779 no changes found
1760 test-debug-phase: move rev 5: 1 -> 0
1780 test-debug-phase: move rev 5: 1 -> 0
1761 test-debug-phase: move rev 5: 1 -> 0
1781 test-debug-phase: move rev 5: 1 -> 0
1762 [1]
1782 [1]
1763 $ hgph
1783 $ hgph
1764 @ 5 public new-E - 48931ee3529c
1784 @ 5 public new-E - 48931ee3529c
1765 |
1785 |
1766 o 4 public new-D - 5e53dcafd13c
1786 o 4 public new-D - 5e53dcafd13c
1767 |
1787 |
1768 | o 3 public new-C - 1665482cc06d
1788 | o 3 public new-C - 1665482cc06d
1769 |/
1789 |/
1770 o 2 public new-B - 89512e87d697
1790 o 2 public new-B - 89512e87d697
1771 |
1791 |
1772 o 1 public new-A - 4826e44e690e
1792 o 1 public new-A - 4826e44e690e
1773 |
1793 |
1774 o 0 public orig-root - c48edaf99a10
1794 o 0 public orig-root - c48edaf99a10
1775
1795
1776 $ cd ..
1796 $ cd ..
1777
1797
1778 --publish with subrepos (doesn't propagate to subrepos currently)
1798 --publish with subrepos (doesn't propagate to subrepos currently)
1779
1799
1780 $ hg init with-subrepo
1800 $ hg init with-subrepo
1781 $ cd with-subrepo
1801 $ cd with-subrepo
1782 $ cat > .hg/hgrc << EOF
1802 $ cat > .hg/hgrc << EOF
1783 > [phases]
1803 > [phases]
1784 > publish=0
1804 > publish=0
1785 > EOF
1805 > EOF
1786 $ hg init subrepo
1806 $ hg init subrepo
1787 $ cd subrepo
1807 $ cd subrepo
1788 $ cat > .hg/hgrc << EOF
1808 $ cat > .hg/hgrc << EOF
1789 > [phases]
1809 > [phases]
1790 > publish=0
1810 > publish=0
1791 > EOF
1811 > EOF
1792 $ echo foo > foo
1812 $ echo foo > foo
1793 $ hg ci -qAm0
1813 $ hg ci -qAm0
1794 test-debug-phase: new rev 0: x -> 1
1814 test-debug-phase: new rev 0: x -> 1
1795 $ cd ..
1815 $ cd ..
1796 $ echo 'subrepo = subrepo' > .hgsub
1816 $ echo 'subrepo = subrepo' > .hgsub
1797 $ hg add .hgsub
1817 $ hg add .hgsub
1798 $ hg ci -m 'Adding subrepo'
1818 $ hg ci -m 'Adding subrepo'
1799 test-debug-phase: new rev 0: x -> 1
1819 test-debug-phase: new rev 0: x -> 1
1800 $ hgph
1820 $ hgph
1801 @ 0 draft Adding subrepo - 74d5b62379c0
1821 @ 0 draft Adding subrepo - 74d5b62379c0
1802
1822
1803 $ hgph -R subrepo
1823 $ hgph -R subrepo
1804 @ 0 draft 0 - 4b3f578e3344
1824 @ 0 draft 0 - 4b3f578e3344
1805
1825
1806 $ cd ..
1826 $ cd ..
1807 $ hg clone with-subrepo client-with-subrepo
1827 $ hg clone with-subrepo client-with-subrepo
1808 updating to branch default
1828 updating to branch default
1809 cloning subrepo subrepo from $TESTTMP/with-subrepo/subrepo
1829 cloning subrepo subrepo from $TESTTMP/with-subrepo/subrepo
1810 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1830 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1811 $ cd client-with-subrepo
1831 $ cd client-with-subrepo
1812 $ hg push --publish
1832 $ hg push --publish
1813 pushing to $TESTTMP/with-subrepo
1833 pushing to $TESTTMP/with-subrepo
1814 no changes made to subrepo subrepo since last push to $TESTTMP/with-subrepo/subrepo
1834 no changes made to subrepo subrepo since last push to $TESTTMP/with-subrepo/subrepo
1815 searching for changes
1835 searching for changes
1816 no changes found
1836 no changes found
1817 test-debug-phase: move rev 0: 1 -> 0
1837 test-debug-phase: move rev 0: 1 -> 0
1818 test-debug-phase: move rev 0: 1 -> 0
1838 test-debug-phase: move rev 0: 1 -> 0
1819 [1]
1839 [1]
1820 $ hgph
1840 $ hgph
1821 @ 0 public Adding subrepo - 74d5b62379c0
1841 @ 0 public Adding subrepo - 74d5b62379c0
1822
1842
1823 $ hgph -R subrepo
1843 $ hgph -R subrepo
1824 @ 0 draft 0 - 4b3f578e3344
1844 @ 0 draft 0 - 4b3f578e3344
1825
1845
General Comments 0
You need to be logged in to leave comments. Login now