errors: raise RemoteError in some places in exchange.py...
Martin von Zweigbergk
r47739:3f87d2af default

# exchange.py - utility to exchange data between repos.
#
# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
)
from . import (
    bookmarks as bookmod,
    bundle2,
    bundlecaches,
    changegroup,
    discovery,
    error,
    exchangev2,
    lock as lockmod,
    logexchange,
    narrowspec,
    obsolete,
    obsutil,
    phases,
    pushkey,
    pycompat,
    requirements,
    scmutil,
    streamclone,
    url as urlmod,
    util,
    wireprototypes,
)
from .utils import (
    hashutil,
    stringutil,
    urlutil,
)

urlerr = util.urlerr
urlreq = util.urlreq

_NARROWACL_SECTION = b'narrowacl'


def readbundle(ui, fh, fname, vfs=None):
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = b"stream"
        if not header.startswith(b'HG') and header.startswith(b'\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = b"HG10"
            alg = b'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != b'HG':
        raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
    if version == b'10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith(b'2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == b'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(
            _(b'%s: unknown bundle version %s') % (fname, version)
        )
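
# Illustrative sketch (not part of upstream exchange.py): a caller typically
# hands readbundle() an already-open binary file object and lets it dispatch
# on the magic header. Assuming ``ui`` and a ``bundlepath`` are available:
#
#     with open(bundlepath, 'rb') as fh:
#         gen = readbundle(ui, fh, bundlepath)
#
# Depending on the detected header, ``gen`` is a changegroup.cg1unpacker, a
# bundle2 unbundler, or a streamclone applier, as returned above.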


def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """

    def speccompression(alg):
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == b'_truncatedBZ':
            alg = b'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
        return b'%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if b'Compression' in b.params:
            comp = speccompression(b.params[b'Compression'])
            if not comp:
                raise error.Abort(
                    _(b'unknown compression algorithm: %s') % comp
                )
        else:
            comp = b'none'

        version = None
        for part in b.iterparts():
            if part.type == b'changegroup':
                version = part.params[b'version']
                if version in (b'01', b'02'):
                    version = b'v2'
                else:
                    raise error.Abort(
                        _(
                            b'changegroup version %s does not have '
                            b'a known bundlespec'
                        )
                        % version,
                        hint=_(b'try upgrading your Mercurial client'),
                    )
            elif part.type == b'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                requirements = urlreq.unquote(part.params[b'requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return b'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(
                _(b'could not identify changegroup version in bundle')
            )

        return b'%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return b'none-packed1;%s' % formatted
    else:
        raise error.Abort(_(b'unknown bundle type: %s') % b)
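
# Illustrative note (not part of upstream exchange.py): the strings returned
# above are bundlespecs of the form '<compression>-v1', '<compression>-v2',
# 'none-v2;stream=v2;<params>' or 'none-packed1;<params>', where the
# compression name (e.g. 'gzip', 'bzip2', 'none') comes from
# util.compengines. These are the same bundlespec strings users pass to
# commands that accept a bundle type, e.g. ``hg bundle --type gzip-v2``
# (hedged: the exact set of accepted names depends on the installed
# compression engines).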


def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    else:
        common = [nullid]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)


def _checkpublish(pushop):
    repo = pushop.repo
    ui = repo.ui
    behavior = ui.config(b'experimental', b'auto-publish')
    if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
        return
    remotephases = listkeys(pushop.remote, b'phases')
    if not remotephases.get(b'publishing', False):
        return

    if pushop.revs is None:
        published = repo.filtered(b'served').revs(b'not public()')
    else:
        published = repo.revs(b'::%ln - public()', pushop.revs)
    if published:
        if behavior == b'warn':
            ui.warn(
                _(b'%i changesets about to be published\n') % len(published)
            )
        elif behavior == b'confirm':
            if ui.promptchoice(
                _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
                % len(published)
            ):
                raise error.CanceledError(_(b'user quit'))
        elif behavior == b'abort':
            msg = _(b'push would publish %i changesets') % len(published)
            hint = _(
                b"use --publish or adjust 'experimental.auto-publish'"
                b" config"
            )
            raise error.Abort(msg, hint=hint)
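
# Illustrative configuration sketch (not part of upstream exchange.py): the
# check above is driven by the ``experimental.auto-publish`` setting, e.g. in
# an hgrc:
#
#     [experimental]
#     auto-publish = abort
#
# 'warn' and 'confirm' are the other values handled above; any other value,
# or pushing with --publish, skips the check entirely.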


def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    ui = op.repo.ui
    # The goal of this config option is to allow developers to choose the
    # bundle version used during exchange. This is especially handy during
    # tests. The value is a list of bundle versions to be picked from; the
    # highest version should be used.
    #
    # developer config: devel.legacy.exchange
    exchange = ui.configlist(b'devel', b'legacy.exchange')
    forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
    return forcebundle1 or not op.remote.capable(b'bundle2')
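
# Illustrative sketch (not part of upstream exchange.py): forcing the bundle1
# exchange path, e.g. in a test hgrc, would look like:
#
#     [devel]
#     legacy.exchange = bundle1
#
# Listing 'bundle2' as well (or instead) keeps bundle2 in use, per the
# ``forcebundle1`` computation above.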


class pushoperation(object):
    """An object that represents a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        force=False,
        revs=None,
        newbranch=False,
        bookmarks=(),
        publish=False,
        pushvars=None,
    ):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed alongside the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars
        # publish pushed changesets
        self.publish = publish

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.ancestorsof

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::ancestorsof and ::commonheads)
        # (ancestorsof is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (ancestorsof and ::commonheads)
        #              + (commonheads and ::ancestorsof))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::ancestorsof) - commonheads)
        #
        # We can pick:
        # * ancestorsof part of common (::commonheads)
        common = self.outgoing.common
        rev = self.repo.changelog.index.rev
        cheads = [node for node in self.revs if rev(node) in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set(
            b'%ln and parents(roots(%ln))',
            self.outgoing.commonheads,
            self.outgoing.missing,
        )
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads


# mapping of message used when pushing bookmark
bookmsgmap = {
    b'update': (
        _(b"updating bookmark %s\n"),
        _(b'updating bookmark %s failed\n'),
    ),
    b'export': (
        _(b"exporting bookmark %s\n"),
        _(b'exporting bookmark %s failed\n'),
    ),
    b'delete': (
        _(b"deleting remote bookmark %s\n"),
        _(b'deleting remote bookmark %s failed\n'),
    ),
}


def push(
    repo,
    remote,
    force=False,
    revs=None,
    newbranch=False,
    bookmarks=(),
    publish=False,
    opargs=None,
):
    """Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    """
    if opargs is None:
        opargs = {}
    pushop = pushoperation(
        repo,
        remote,
        force,
        revs,
        newbranch,
        bookmarks,
        publish,
        **pycompat.strkwargs(opargs)
    )
    if pushop.remote.local():
        missing = (
            set(pushop.repo.requirements) - pushop.remote.local().supported
        )
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_(b"destination does not support push"))

    if not pushop.remote.capable(b'unbundle'):
        raise error.Abort(
            _(
                b'cannot push: destination does not support the '
                b'unbundle wire protocol command'
            )
        )
    for category in sorted(bundle2.read_remote_wanted_sidedata(pushop.remote)):
        # Check that a computer is registered for that category for at least
        # one revlog kind.
        for kind, computers in repo._sidedata_computers.items():
            if computers.get(category):
                break
        else:
            raise error.Abort(
                _(
                    b'cannot push: required sidedata category not supported'
                    b" by this client: '%s'"
                )
                % pycompat.bytestr(category)
            )
    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
        if (
            (not _forcebundle1(pushop))
            and maypushback
            and not bookmod.bookmarksinstore(repo)
        ):
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(
            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
            err
        )
        pushop.ui.debug(msg)

    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not pushop.force:
                    _checksubrepostate(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool(b'experimental', b'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
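
# Illustrative sketch (not part of upstream exchange.py): a typical caller,
# e.g. the push command, roughly does the following, assuming ``repo`` and a
# ``remote`` peer are already available:
#
#     pushop = push(repo, remote, force=False, revs=None, bookmarks=())
#     if pushop.cgresult is None:
#         pass  # nothing was pushed
#
# The returned pushoperation exposes ``cgresult`` and ``bkresult`` with the
# meanings documented in the docstring above.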


# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}


def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order (this
    may matter).

    You can only use this decorator for a new step; if you want to wrap a step
    from an extension, change the pushdiscoverymapping dictionary directly."""

    def dec(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func

    return dec
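
# Illustrative sketch (not part of upstream exchange.py): an extension could
# register an additional discovery step like so (the step name and function
# below are hypothetical):
#
#     @pushdiscovery(b'my-extra-data')
#     def _pushdiscoverymyextradata(pushop):
#         pushop.ui.debug(b'discovering extra data to push\n')
#
# The step then runs as part of _pushdiscovery() below, in registration order.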


def _pushdiscovery(pushop):
    """Run all discovery steps"""
    for stepname in pushdiscoveryorder:
        step = pushdiscoverymapping[stepname]
        step(pushop)


def _checksubrepostate(pushop):
    """Ensure all outgoing referenced subrepo revisions are present locally"""
    for n in pushop.outgoing.missing:
        ctx = pushop.repo[n]

        if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                sub.verify(onpush=True)


@pushdiscovery(b'changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    fci = discovery.findcommonincoming
    if pushop.revs:
        commoninc = fci(
            pushop.repo,
            pushop.remote,
            force=pushop.force,
            ancestorsof=pushop.revs,
        )
    else:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(
        pushop.repo,
        pushop.remote,
        onlyheads=pushop.revs,
        commoninc=commoninc,
        force=pushop.force,
    )
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc


@pushdiscovery(b'phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, b'phases')

    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and not pushop.outgoing.missing  # no changesets to be pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and the remote supports phases
        # - and no changesets are to be pushed
        # - and the remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(
        pushop.repo, pushop.fallbackheads, remotephases
    )
    droots = pushop.remotephases.draftroots

    extracond = b''
    if not pushop.remotephases.publishing:
        extracond = b' and public()'
    revset = b'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that the revset breaks if droots is not strictly
    # XXX roots. We may want to ensure it is, but it is costly.
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not pushop.remotephases.publishing and pushop.publish:
        future = list(
            unfi.set(
                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
            )
        )
    elif not outgoing.missing:
        future = fallback
    else:
        # adds changesets we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(
            unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
        )
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback


@pushdiscovery(b'obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
        return

    if not pushop.repo.obsstore:
        return

    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
        return

    repo = pushop.repo
    # very naive computation, that can be quite expensive on big repos.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
    pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)


@pushdiscovery(b'bookmarks')
def _pushdiscoverybookmarks(pushop):
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug(b"checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))

    explicit = {
        repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
    }

    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)


def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repo

    Exists to help extensions alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
            if bookmod.isdivergent(b):
                pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
                pushop.bkresult = 2
            else:
                pushop.outbookmarks.append((b, b'', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, b''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(
            _(
                b'bookmark %s does not exist on the local '
                b'or remote repository!\n'
            )
            % explicit[0]
        )
        pushop.bkresult = 2

    pushop.outbookmarks.sort()


def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are here for the 80 char limit reason
            mso = _(b"push includes obsolete changeset: %s!")
            mspd = _(b"push includes phase-divergent changeset: %s!")
            mscd = _(b"push includes content-divergent changeset: %s!")
            mst = {
                b"orphan": _(b"push includes orphan changeset: %s!"),
                b"phase-divergent": mspd,
                b"content-divergent": mscd,
            }
            # If we are going to push: if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missing heads will be obsolete or
            # unstable. So checking heads only is ok.
            for node in outgoing.ancestorsof:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True


# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}


def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, change the b2partsgenmapping dictionary directly."""

    def dec(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func

    return dec
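
# Illustrative sketch (not part of upstream exchange.py): an extension could
# contribute an extra bundle2 part to every push like so (the step and part
# names below are hypothetical):
#
#     @b2partsgenerator(b'my-part')
#     def _pushb2mypart(pushop, bundler):
#         bundler.newpart(b'my-extension:data', data=b'some payload')
#
# Returning a callable from such a generator (as _pushb2ctx() below does with
# ``handlereply``) lets the push machinery post-process the server's reply
# for that part.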
780
780
781
781
782 def _pushb2ctxcheckheads(pushop, bundler):
782 def _pushb2ctxcheckheads(pushop, bundler):
783 """Generate race condition checking parts
783 """Generate race condition checking parts
784
784
785 Exists as an independent function to aid extensions
785 Exists as an independent function to aid extensions
786 """
786 """
787 # * 'force' do not check for push race,
787 # * 'force' do not check for push race,
788 # * if we don't push anything, there are nothing to check.
788 # * if we don't push anything, there are nothing to check.
789 if not pushop.force and pushop.outgoing.ancestorsof:
789 if not pushop.force and pushop.outgoing.ancestorsof:
790 allowunrelated = b'related' in bundler.capabilities.get(
790 allowunrelated = b'related' in bundler.capabilities.get(
791 b'checkheads', ()
791 b'checkheads', ()
792 )
792 )
793 emptyremote = pushop.pushbranchmap is None
793 emptyremote = pushop.pushbranchmap is None
794 if not allowunrelated or emptyremote:
794 if not allowunrelated or emptyremote:
795 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
795 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
796 else:
796 else:
797 affected = set()
797 affected = set()
798 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
798 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
799 remoteheads, newheads, unsyncedheads, discardedheads = heads
799 remoteheads, newheads, unsyncedheads, discardedheads = heads
800 if remoteheads is not None:
800 if remoteheads is not None:
801 remote = set(remoteheads)
801 remote = set(remoteheads)
802 affected |= set(discardedheads) & remote
802 affected |= set(discardedheads) & remote
803 affected |= remote - set(newheads)
803 affected |= remote - set(newheads)
804 if affected:
804 if affected:
805 data = iter(sorted(affected))
805 data = iter(sorted(affected))
806 bundler.newpart(b'check:updated-heads', data=data)
806 bundler.newpart(b'check:updated-heads', data=data)
807
807
808
808
809 def _pushing(pushop):
809 def _pushing(pushop):
810 """return True if we are pushing anything"""
810 """return True if we are pushing anything"""
811 return bool(
811 return bool(
812 pushop.outgoing.missing
812 pushop.outgoing.missing
813 or pushop.outdatedphases
813 or pushop.outdatedphases
814 or pushop.outobsmarkers
814 or pushop.outobsmarkers
815 or pushop.outbookmarks
815 or pushop.outbookmarks
816 )
816 )
817
817
818
818
819 @b2partsgenerator(b'check-bookmarks')
819 @b2partsgenerator(b'check-bookmarks')
820 def _pushb2checkbookmarks(pushop, bundler):
820 def _pushb2checkbookmarks(pushop, bundler):
821 """insert bookmark move checking"""
821 """insert bookmark move checking"""
822 if not _pushing(pushop) or pushop.force:
822 if not _pushing(pushop) or pushop.force:
823 return
823 return
824 b2caps = bundle2.bundle2caps(pushop.remote)
824 b2caps = bundle2.bundle2caps(pushop.remote)
825 hasbookmarkcheck = b'bookmarks' in b2caps
825 hasbookmarkcheck = b'bookmarks' in b2caps
826 if not (pushop.outbookmarks and hasbookmarkcheck):
826 if not (pushop.outbookmarks and hasbookmarkcheck):
827 return
827 return
828 data = []
828 data = []
829 for book, old, new in pushop.outbookmarks:
829 for book, old, new in pushop.outbookmarks:
830 data.append((book, old))
830 data.append((book, old))
831 checkdata = bookmod.binaryencode(pushop.repo, data)
831 checkdata = bookmod.binaryencode(pushop.repo, data)
832 bundler.newpart(b'check:bookmarks', data=checkdata)
832 bundler.newpart(b'check:bookmarks', data=checkdata)
833
833
834
834
835 @b2partsgenerator(b'check-phases')
835 @b2partsgenerator(b'check-phases')
836 def _pushb2checkphases(pushop, bundler):
836 def _pushb2checkphases(pushop, bundler):
837 """insert phase move checking"""
837 """insert phase move checking"""
838 if not _pushing(pushop) or pushop.force:
838 if not _pushing(pushop) or pushop.force:
839 return
839 return
840 b2caps = bundle2.bundle2caps(pushop.remote)
840 b2caps = bundle2.bundle2caps(pushop.remote)
841 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
841 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
842 if pushop.remotephases is not None and hasphaseheads:
842 if pushop.remotephases is not None and hasphaseheads:
843 # check that the remote phase has not changed
843 # check that the remote phase has not changed
844 checks = {p: [] for p in phases.allphases}
844 checks = {p: [] for p in phases.allphases}
845 checks[phases.public].extend(pushop.remotephases.publicheads)
845 checks[phases.public].extend(pushop.remotephases.publicheads)
846 checks[phases.draft].extend(pushop.remotephases.draftroots)
846 checks[phases.draft].extend(pushop.remotephases.draftroots)
847 if any(pycompat.itervalues(checks)):
847 if any(pycompat.itervalues(checks)):
848 for phase in checks:
848 for phase in checks:
849 checks[phase].sort()
849 checks[phase].sort()
850 checkdata = phases.binaryencode(checks)
850 checkdata = phases.binaryencode(checks)
851 bundler.newpart(b'check:phases', data=checkdata)
851 bundler.newpart(b'check:phases', data=checkdata)
852
852
853
853
854 @b2partsgenerator(b'changeset')
854 @b2partsgenerator(b'changeset')
855 def _pushb2ctx(pushop, bundler):
855 def _pushb2ctx(pushop, bundler):
856 """handle changegroup push through bundle2
856 """handle changegroup push through bundle2
857
857
858 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
858 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
859 """
859 """
860 if b'changesets' in pushop.stepsdone:
860 if b'changesets' in pushop.stepsdone:
861 return
861 return
862 pushop.stepsdone.add(b'changesets')
862 pushop.stepsdone.add(b'changesets')
863 # Send known heads to the server for race detection.
863 # Send known heads to the server for race detection.
864 if not _pushcheckoutgoing(pushop):
864 if not _pushcheckoutgoing(pushop):
865 return
865 return
866 pushop.repo.prepushoutgoinghooks(pushop)
866 pushop.repo.prepushoutgoinghooks(pushop)
867
867
868 _pushb2ctxcheckheads(pushop, bundler)
868 _pushb2ctxcheckheads(pushop, bundler)
869
869
870 b2caps = bundle2.bundle2caps(pushop.remote)
870 b2caps = bundle2.bundle2caps(pushop.remote)
871 version = b'01'
871 version = b'01'
872 cgversions = b2caps.get(b'changegroup')
872 cgversions = b2caps.get(b'changegroup')
873 if cgversions: # 3.1 and 3.2 ship with an empty value
873 if cgversions: # 3.1 and 3.2 ship with an empty value
874 cgversions = [
874 cgversions = [
875 v
875 v
876 for v in cgversions
876 for v in cgversions
877 if v in changegroup.supportedoutgoingversions(pushop.repo)
877 if v in changegroup.supportedoutgoingversions(pushop.repo)
878 ]
878 ]
879 if not cgversions:
879 if not cgversions:
880 raise error.Abort(_(b'no common changegroup version'))
880 raise error.Abort(_(b'no common changegroup version'))
881 version = max(cgversions)
881 version = max(cgversions)
882
882
883 remote_sidedata = bundle2.read_remote_wanted_sidedata(pushop.remote)
883 remote_sidedata = bundle2.read_remote_wanted_sidedata(pushop.remote)
884 cgstream = changegroup.makestream(
884 cgstream = changegroup.makestream(
885 pushop.repo,
885 pushop.repo,
886 pushop.outgoing,
886 pushop.outgoing,
887 version,
887 version,
888 b'push',
888 b'push',
889 bundlecaps=b2caps,
889 bundlecaps=b2caps,
890 remote_sidedata=remote_sidedata,
890 remote_sidedata=remote_sidedata,
891 )
891 )
892 cgpart = bundler.newpart(b'changegroup', data=cgstream)
892 cgpart = bundler.newpart(b'changegroup', data=cgstream)
893 if cgversions:
893 if cgversions:
894 cgpart.addparam(b'version', version)
894 cgpart.addparam(b'version', version)
895 if scmutil.istreemanifest(pushop.repo):
895 if scmutil.istreemanifest(pushop.repo):
896 cgpart.addparam(b'treemanifest', b'1')
896 cgpart.addparam(b'treemanifest', b'1')
897 if b'exp-sidedata-flag' in pushop.repo.requirements:
897 if b'exp-sidedata-flag' in pushop.repo.requirements:
898 cgpart.addparam(b'exp-sidedata', b'1')
898 cgpart.addparam(b'exp-sidedata', b'1')
899
899
900 def handlereply(op):
900 def handlereply(op):
901 """extract addchangegroup returns from server reply"""
901 """extract addchangegroup returns from server reply"""
902 cgreplies = op.records.getreplies(cgpart.id)
902 cgreplies = op.records.getreplies(cgpart.id)
903 assert len(cgreplies[b'changegroup']) == 1
903 assert len(cgreplies[b'changegroup']) == 1
904 pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
904 pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
905
905
906 return handlereply
906 return handlereply
907
907
908
908
909 @b2partsgenerator(b'phase')
909 @b2partsgenerator(b'phase')
910 def _pushb2phases(pushop, bundler):
910 def _pushb2phases(pushop, bundler):
911 """handle phase push through bundle2"""
911 """handle phase push through bundle2"""
912 if b'phases' in pushop.stepsdone:
912 if b'phases' in pushop.stepsdone:
913 return
913 return
914 b2caps = bundle2.bundle2caps(pushop.remote)
914 b2caps = bundle2.bundle2caps(pushop.remote)
915 ui = pushop.repo.ui
915 ui = pushop.repo.ui
916
916
917 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
917 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
918 haspushkey = b'pushkey' in b2caps
918 haspushkey = b'pushkey' in b2caps
919 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
919 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
920
920
921 if hasphaseheads and not legacyphase:
921 if hasphaseheads and not legacyphase:
922 return _pushb2phaseheads(pushop, bundler)
922 return _pushb2phaseheads(pushop, bundler)
923 elif haspushkey:
923 elif haspushkey:
924 return _pushb2phasespushkey(pushop, bundler)
924 return _pushb2phasespushkey(pushop, bundler)
925
925
926
926
927 def _pushb2phaseheads(pushop, bundler):
927 def _pushb2phaseheads(pushop, bundler):
928 """push phase information through a bundle2 - binary part"""
928 """push phase information through a bundle2 - binary part"""
929 pushop.stepsdone.add(b'phases')
929 pushop.stepsdone.add(b'phases')
930 if pushop.outdatedphases:
930 if pushop.outdatedphases:
931 updates = {p: [] for p in phases.allphases}
931 updates = {p: [] for p in phases.allphases}
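# (editor's note) phase index 0 is phases.public: the outdated remote
# heads collected below are advertised as public via the binary
# 'phase-heads' part.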
932 updates[0].extend(h.node() for h in pushop.outdatedphases)
932 updates[0].extend(h.node() for h in pushop.outdatedphases)
933 phasedata = phases.binaryencode(updates)
933 phasedata = phases.binaryencode(updates)
934 bundler.newpart(b'phase-heads', data=phasedata)
934 bundler.newpart(b'phase-heads', data=phasedata)
935
935
936
936
937 def _pushb2phasespushkey(pushop, bundler):
937 def _pushb2phasespushkey(pushop, bundler):
938 """push phase information through a bundle2 - pushkey part"""
938 """push phase information through a bundle2 - pushkey part"""
939 pushop.stepsdone.add(b'phases')
939 pushop.stepsdone.add(b'phases')
940 part2node = []
940 part2node = []
941
941
942 def handlefailure(pushop, exc):
942 def handlefailure(pushop, exc):
943 targetid = int(exc.partid)
943 targetid = int(exc.partid)
944 for partid, node in part2node:
944 for partid, node in part2node:
945 if partid == targetid:
945 if partid == targetid:
946 raise error.Abort(_(b'updating %s to public failed') % node)
946 raise error.Abort(_(b'updating %s to public failed') % node)
947
947
948 enc = pushkey.encode
948 enc = pushkey.encode
949 for newremotehead in pushop.outdatedphases:
949 for newremotehead in pushop.outdatedphases:
950 part = bundler.newpart(b'pushkey')
950 part = bundler.newpart(b'pushkey')
951 part.addparam(b'namespace', enc(b'phases'))
951 part.addparam(b'namespace', enc(b'phases'))
952 part.addparam(b'key', enc(newremotehead.hex()))
952 part.addparam(b'key', enc(newremotehead.hex()))
953 part.addparam(b'old', enc(b'%d' % phases.draft))
953 part.addparam(b'old', enc(b'%d' % phases.draft))
954 part.addparam(b'new', enc(b'%d' % phases.public))
954 part.addparam(b'new', enc(b'%d' % phases.public))
955 part2node.append((part.id, newremotehead))
955 part2node.append((part.id, newremotehead))
956 pushop.pkfailcb[part.id] = handlefailure
956 pushop.pkfailcb[part.id] = handlefailure
957
957
958 def handlereply(op):
958 def handlereply(op):
959 for partid, node in part2node:
959 for partid, node in part2node:
960 partrep = op.records.getreplies(partid)
960 partrep = op.records.getreplies(partid)
961 results = partrep[b'pushkey']
961 results = partrep[b'pushkey']
962 assert len(results) <= 1
962 assert len(results) <= 1
963 msg = None
963 msg = None
964 if not results:
964 if not results:
965 msg = _(b'server ignored update of %s to public!\n') % node
965 msg = _(b'server ignored update of %s to public!\n') % node
966 elif not int(results[0][b'return']):
966 elif not int(results[0][b'return']):
967 msg = _(b'updating %s to public failed!\n') % node
967 msg = _(b'updating %s to public failed!\n') % node
968 if msg is not None:
968 if msg is not None:
969 pushop.ui.warn(msg)
969 pushop.ui.warn(msg)
970
970
971 return handlereply
971 return handlereply
972
972
973
973
974 @b2partsgenerator(b'obsmarkers')
974 @b2partsgenerator(b'obsmarkers')
975 def _pushb2obsmarkers(pushop, bundler):
975 def _pushb2obsmarkers(pushop, bundler):
976 if b'obsmarkers' in pushop.stepsdone:
976 if b'obsmarkers' in pushop.stepsdone:
977 return
977 return
978 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
978 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
979 if obsolete.commonversion(remoteversions) is None:
979 if obsolete.commonversion(remoteversions) is None:
980 return
980 return
981 pushop.stepsdone.add(b'obsmarkers')
981 pushop.stepsdone.add(b'obsmarkers')
982 if pushop.outobsmarkers:
982 if pushop.outobsmarkers:
983 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
983 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
984 bundle2.buildobsmarkerspart(bundler, markers)
984 bundle2.buildobsmarkerspart(bundler, markers)
985
985
986
986
987 @b2partsgenerator(b'bookmarks')
987 @b2partsgenerator(b'bookmarks')
988 def _pushb2bookmarks(pushop, bundler):
988 def _pushb2bookmarks(pushop, bundler):
989 """handle bookmark push through bundle2"""
989 """handle bookmark push through bundle2"""
990 if b'bookmarks' in pushop.stepsdone:
990 if b'bookmarks' in pushop.stepsdone:
991 return
991 return
992 b2caps = bundle2.bundle2caps(pushop.remote)
992 b2caps = bundle2.bundle2caps(pushop.remote)
993
993
994 legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
994 legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
995 legacybooks = b'bookmarks' in legacy
995 legacybooks = b'bookmarks' in legacy
996
996
997 if not legacybooks and b'bookmarks' in b2caps:
997 if not legacybooks and b'bookmarks' in b2caps:
998 return _pushb2bookmarkspart(pushop, bundler)
998 return _pushb2bookmarkspart(pushop, bundler)
999 elif b'pushkey' in b2caps:
999 elif b'pushkey' in b2caps:
1000 return _pushb2bookmarkspushkey(pushop, bundler)
1000 return _pushb2bookmarkspushkey(pushop, bundler)
1001
1001
1002
1002
1003 def _bmaction(old, new):
1003 def _bmaction(old, new):
1004 """small utility for bookmark pushing"""
1004 """small utility for bookmark pushing"""
1005 if not old:
1005 if not old:
1006 return b'export'
1006 return b'export'
1007 elif not new:
1007 elif not new:
1008 return b'delete'
1008 return b'delete'
1009 return b'update'
1009 return b'update'
1010
1010
1011
1011
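# Editor's illustration (not part of exchange.py): _bmaction() above maps
# the old/new bookmark values to the message key later looked up in
# bookmsgmap, roughly:
#     _bmaction(None, newnode)    -> b'export'  (bookmark is new on the remote)
#     _bmaction(oldnode, None)    -> b'delete'  (bookmark is being removed)
#     _bmaction(oldnode, newnode) -> b'update'  (bookmark moved)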
1012 def _abortonsecretctx(pushop, node, b):
1012 def _abortonsecretctx(pushop, node, b):
1013 """abort if a given bookmark points to a secret changeset"""
1013 """abort if a given bookmark points to a secret changeset"""
1014 if node and pushop.repo[node].phase() == phases.secret:
1014 if node and pushop.repo[node].phase() == phases.secret:
1015 raise error.Abort(
1015 raise error.Abort(
1016 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1016 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1017 )
1017 )
1018
1018
1019
1019
1020 def _pushb2bookmarkspart(pushop, bundler):
1020 def _pushb2bookmarkspart(pushop, bundler):
1021 pushop.stepsdone.add(b'bookmarks')
1021 pushop.stepsdone.add(b'bookmarks')
1022 if not pushop.outbookmarks:
1022 if not pushop.outbookmarks:
1023 return
1023 return
1024
1024
1025 allactions = []
1025 allactions = []
1026 data = []
1026 data = []
1027 for book, old, new in pushop.outbookmarks:
1027 for book, old, new in pushop.outbookmarks:
1028 _abortonsecretctx(pushop, new, book)
1028 _abortonsecretctx(pushop, new, book)
1029 data.append((book, new))
1029 data.append((book, new))
1030 allactions.append((book, _bmaction(old, new)))
1030 allactions.append((book, _bmaction(old, new)))
1031 checkdata = bookmod.binaryencode(pushop.repo, data)
1031 checkdata = bookmod.binaryencode(pushop.repo, data)
1032 bundler.newpart(b'bookmarks', data=checkdata)
1032 bundler.newpart(b'bookmarks', data=checkdata)
1033
1033
1034 def handlereply(op):
1034 def handlereply(op):
1035 ui = pushop.ui
1035 ui = pushop.ui
1036 # if success
1036 # if success
1037 for book, action in allactions:
1037 for book, action in allactions:
1038 ui.status(bookmsgmap[action][0] % book)
1038 ui.status(bookmsgmap[action][0] % book)
1039
1039
1040 return handlereply
1040 return handlereply
1041
1041
1042
1042
1043 def _pushb2bookmarkspushkey(pushop, bundler):
1043 def _pushb2bookmarkspushkey(pushop, bundler):
1044 pushop.stepsdone.add(b'bookmarks')
1044 pushop.stepsdone.add(b'bookmarks')
1045 part2book = []
1045 part2book = []
1046 enc = pushkey.encode
1046 enc = pushkey.encode
1047
1047
1048 def handlefailure(pushop, exc):
1048 def handlefailure(pushop, exc):
1049 targetid = int(exc.partid)
1049 targetid = int(exc.partid)
1050 for partid, book, action in part2book:
1050 for partid, book, action in part2book:
1051 if partid == targetid:
1051 if partid == targetid:
1052 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1052 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1053 # we should not be called for a part we did not generate
1053 # we should not be called for a part we did not generate

1054 assert False
1054 assert False
1055
1055
1056 for book, old, new in pushop.outbookmarks:
1056 for book, old, new in pushop.outbookmarks:
1057 _abortonsecretctx(pushop, new, book)
1057 _abortonsecretctx(pushop, new, book)
1058 part = bundler.newpart(b'pushkey')
1058 part = bundler.newpart(b'pushkey')
1059 part.addparam(b'namespace', enc(b'bookmarks'))
1059 part.addparam(b'namespace', enc(b'bookmarks'))
1060 part.addparam(b'key', enc(book))
1060 part.addparam(b'key', enc(book))
1061 part.addparam(b'old', enc(hex(old)))
1061 part.addparam(b'old', enc(hex(old)))
1062 part.addparam(b'new', enc(hex(new)))
1062 part.addparam(b'new', enc(hex(new)))
1063 action = b'update'
1063 action = b'update'
1064 if not old:
1064 if not old:
1065 action = b'export'
1065 action = b'export'
1066 elif not new:
1066 elif not new:
1067 action = b'delete'
1067 action = b'delete'
1068 part2book.append((part.id, book, action))
1068 part2book.append((part.id, book, action))
1069 pushop.pkfailcb[part.id] = handlefailure
1069 pushop.pkfailcb[part.id] = handlefailure
1070
1070
1071 def handlereply(op):
1071 def handlereply(op):
1072 ui = pushop.ui
1072 ui = pushop.ui
1073 for partid, book, action in part2book:
1073 for partid, book, action in part2book:
1074 partrep = op.records.getreplies(partid)
1074 partrep = op.records.getreplies(partid)
1075 results = partrep[b'pushkey']
1075 results = partrep[b'pushkey']
1076 assert len(results) <= 1
1076 assert len(results) <= 1
1077 if not results:
1077 if not results:
1078 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1078 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1079 else:
1079 else:
1080 ret = int(results[0][b'return'])
1080 ret = int(results[0][b'return'])
1081 if ret:
1081 if ret:
1082 ui.status(bookmsgmap[action][0] % book)
1082 ui.status(bookmsgmap[action][0] % book)
1083 else:
1083 else:
1084 ui.warn(bookmsgmap[action][1] % book)
1084 ui.warn(bookmsgmap[action][1] % book)
1085 if pushop.bkresult is not None:
1085 if pushop.bkresult is not None:
1086 pushop.bkresult = 1
1086 pushop.bkresult = 1
1087
1087
1088 return handlereply
1088 return handlereply
1089
1089
1090
1090
1091 @b2partsgenerator(b'pushvars', idx=0)
1091 @b2partsgenerator(b'pushvars', idx=0)
1092 def _getbundlesendvars(pushop, bundler):
1092 def _getbundlesendvars(pushop, bundler):
1093 '''send shellvars via bundle2'''
1093 '''send shellvars via bundle2'''
1094 pushvars = pushop.pushvars
1094 pushvars = pushop.pushvars
1095 if pushvars:
1095 if pushvars:
1096 shellvars = {}
1096 shellvars = {}
1097 for raw in pushvars:
1097 for raw in pushvars:
1098 if b'=' not in raw:
1098 if b'=' not in raw:
1099 msg = (
1099 msg = (
1100 b"unable to parse variable '%s', should follow "
1100 b"unable to parse variable '%s', should follow "
1101 b"'KEY=VALUE' or 'KEY=' format"
1101 b"'KEY=VALUE' or 'KEY=' format"
1102 )
1102 )
1103 raise error.Abort(msg % raw)
1103 raise error.Abort(msg % raw)
1104 k, v = raw.split(b'=', 1)
1104 k, v = raw.split(b'=', 1)
1105 shellvars[k] = v
1105 shellvars[k] = v
1106
1106
1107 part = bundler.newpart(b'pushvars')
1107 part = bundler.newpart(b'pushvars')
1108
1108
1109 for key, value in pycompat.iteritems(shellvars):
1109 for key, value in pycompat.iteritems(shellvars):
1110 part.addparam(key, value, mandatory=False)
1110 part.addparam(key, value, mandatory=False)
1111
1111
1112
1112
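# Editor's illustration (not part of exchange.py): the loop above turns
# `hg push --pushvars KEY=VALUE` arguments into bundle2 part parameters.
# A hypothetical standalone equivalent of the parsing step:
#
#     def parse_pushvars(raw_vars):
#         shellvars = {}
#         for raw in raw_vars:
#             if b'=' not in raw:
#                 raise ValueError(b"unable to parse variable %s" % raw)
#             k, v = raw.split(b'=', 1)
#             shellvars[k] = v
#         return shellvars
#
#     parse_pushvars([b'DEBUG=1', b'REASON='])
#     # -> {b'DEBUG': b'1', b'REASON': b''}
#
# Server-side hooks typically see these values as HG_USERVAR_* environment
# variables (assumed here; see the pushvars feature documentation).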
1113 def _pushbundle2(pushop):
1113 def _pushbundle2(pushop):
1114 """push data to the remote using bundle2
1114 """push data to the remote using bundle2
1115
1115
1116 The only currently supported type of data is changegroup but this will
1116 The only currently supported type of data is changegroup but this will
1117 evolve in the future."""
1117 evolve in the future."""
1118 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1118 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1119 pushback = pushop.trmanager and pushop.ui.configbool(
1119 pushback = pushop.trmanager and pushop.ui.configbool(
1120 b'experimental', b'bundle2.pushback'
1120 b'experimental', b'bundle2.pushback'
1121 )
1121 )
1122
1122
1123 # create reply capability
1123 # create reply capability
1124 capsblob = bundle2.encodecaps(
1124 capsblob = bundle2.encodecaps(
1125 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1125 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1126 )
1126 )
1127 bundler.newpart(b'replycaps', data=capsblob)
1127 bundler.newpart(b'replycaps', data=capsblob)
1128 replyhandlers = []
1128 replyhandlers = []
1129 for partgenname in b2partsgenorder:
1129 for partgenname in b2partsgenorder:
1130 partgen = b2partsgenmapping[partgenname]
1130 partgen = b2partsgenmapping[partgenname]
1131 ret = partgen(pushop, bundler)
1131 ret = partgen(pushop, bundler)
1132 if callable(ret):
1132 if callable(ret):
1133 replyhandlers.append(ret)
1133 replyhandlers.append(ret)
1134 # do not push if nothing to push
1134 # do not push if nothing to push
1135 if bundler.nbparts <= 1:
1135 if bundler.nbparts <= 1:
1136 return
1136 return
1137 stream = util.chunkbuffer(bundler.getchunks())
1137 stream = util.chunkbuffer(bundler.getchunks())
1138 try:
1138 try:
1139 try:
1139 try:
1140 with pushop.remote.commandexecutor() as e:
1140 with pushop.remote.commandexecutor() as e:
1141 reply = e.callcommand(
1141 reply = e.callcommand(
1142 b'unbundle',
1142 b'unbundle',
1143 {
1143 {
1144 b'bundle': stream,
1144 b'bundle': stream,
1145 b'heads': [b'force'],
1145 b'heads': [b'force'],
1146 b'url': pushop.remote.url(),
1146 b'url': pushop.remote.url(),
1147 },
1147 },
1148 ).result()
1148 ).result()
1149 except error.BundleValueError as exc:
1149 except error.BundleValueError as exc:
1150 raise error.Abort(_(b'missing support for %s') % exc)
1150 raise error.RemoteError(_(b'missing support for %s') % exc)
1151 try:
1151 try:
1152 trgetter = None
1152 trgetter = None
1153 if pushback:
1153 if pushback:
1154 trgetter = pushop.trmanager.transaction
1154 trgetter = pushop.trmanager.transaction
1155 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1155 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1156 except error.BundleValueError as exc:
1156 except error.BundleValueError as exc:
1157 raise error.Abort(_(b'missing support for %s') % exc)
1157 raise error.RemoteError(_(b'missing support for %s') % exc)
1158 except bundle2.AbortFromPart as exc:
1158 except bundle2.AbortFromPart as exc:
1159 pushop.ui.error(_(b'remote: %s\n') % exc)
1159 pushop.ui.error(_(b'remote: %s\n') % exc)
1160 if exc.hint is not None:
1160 if exc.hint is not None:
1161 pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1161 pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1162 raise error.Abort(_(b'push failed on remote'))
1162 raise error.RemoteError(_(b'push failed on remote'))
1163 except error.PushkeyFailed as exc:
1163 except error.PushkeyFailed as exc:
1164 partid = int(exc.partid)
1164 partid = int(exc.partid)
1165 if partid not in pushop.pkfailcb:
1165 if partid not in pushop.pkfailcb:
1166 raise
1166 raise
1167 pushop.pkfailcb[partid](pushop, exc)
1167 pushop.pkfailcb[partid](pushop, exc)
1168 for rephand in replyhandlers:
1168 for rephand in replyhandlers:
1169 rephand(op)
1169 rephand(op)
1170
1170
1171
1171
1172 def _pushchangeset(pushop):
1172 def _pushchangeset(pushop):
1173 """Make the actual push of changeset bundle to remote repo"""
1173 """Make the actual push of changeset bundle to remote repo"""
1174 if b'changesets' in pushop.stepsdone:
1174 if b'changesets' in pushop.stepsdone:
1175 return
1175 return
1176 pushop.stepsdone.add(b'changesets')
1176 pushop.stepsdone.add(b'changesets')
1177 if not _pushcheckoutgoing(pushop):
1177 if not _pushcheckoutgoing(pushop):
1178 return
1178 return
1179
1179
1180 # Should have verified this in push().
1180 # Should have verified this in push().
1181 assert pushop.remote.capable(b'unbundle')
1181 assert pushop.remote.capable(b'unbundle')
1182
1182
1183 pushop.repo.prepushoutgoinghooks(pushop)
1183 pushop.repo.prepushoutgoinghooks(pushop)
1184 outgoing = pushop.outgoing
1184 outgoing = pushop.outgoing
1185 # TODO: get bundlecaps from remote
1185 # TODO: get bundlecaps from remote
1186 bundlecaps = None
1186 bundlecaps = None
1187 # create a changegroup from local
1187 # create a changegroup from local
1188 if pushop.revs is None and not (
1188 if pushop.revs is None and not (
1189 outgoing.excluded or pushop.repo.changelog.filteredrevs
1189 outgoing.excluded or pushop.repo.changelog.filteredrevs
1190 ):
1190 ):
1191 # push everything,
1191 # push everything,
1192 # use the fast path, no race possible on push
1192 # use the fast path, no race possible on push
1193 cg = changegroup.makechangegroup(
1193 cg = changegroup.makechangegroup(
1194 pushop.repo,
1194 pushop.repo,
1195 outgoing,
1195 outgoing,
1196 b'01',
1196 b'01',
1197 b'push',
1197 b'push',
1198 fastpath=True,
1198 fastpath=True,
1199 bundlecaps=bundlecaps,
1199 bundlecaps=bundlecaps,
1200 )
1200 )
1201 else:
1201 else:
1202 cg = changegroup.makechangegroup(
1202 cg = changegroup.makechangegroup(
1203 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1203 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1204 )
1204 )
1205
1205
1206 # apply changegroup to remote
1206 # apply changegroup to remote
1207 # local repo finds heads on server, finds out what
1207 # local repo finds heads on server, finds out what
1208 # revs it must push. Once the revs are transferred, if the server
1208 # revs it must push. Once the revs are transferred, if the server
1209 # finds it has different heads (someone else won the
1209 # finds it has different heads (someone else won the
1210 # commit/push race), the server aborts.
1210 # commit/push race), the server aborts.
1211 if pushop.force:
1211 if pushop.force:
1212 remoteheads = [b'force']
1212 remoteheads = [b'force']
1213 else:
1213 else:
1214 remoteheads = pushop.remoteheads
1214 remoteheads = pushop.remoteheads
1215 # ssh: return remote's addchangegroup()
1215 # ssh: return remote's addchangegroup()
1216 # http: return remote's addchangegroup() or 0 for error
1216 # http: return remote's addchangegroup() or 0 for error
1217 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1217 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1218
1218
1219
1219
1220 def _pushsyncphase(pushop):
1220 def _pushsyncphase(pushop):
1221 """synchronise phase information locally and remotely"""
1221 """synchronise phase information locally and remotely"""
1222 cheads = pushop.commonheads
1222 cheads = pushop.commonheads
1223 # even when we don't push, exchanging phase data is useful
1223 # even when we don't push, exchanging phase data is useful
1224 remotephases = listkeys(pushop.remote, b'phases')
1224 remotephases = listkeys(pushop.remote, b'phases')
1225 if (
1225 if (
1226 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1226 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1227 and remotephases # server supports phases
1227 and remotephases # server supports phases
1228 and pushop.cgresult is None # nothing was pushed
1228 and pushop.cgresult is None # nothing was pushed
1229 and remotephases.get(b'publishing', False)
1229 and remotephases.get(b'publishing', False)
1230 ):
1230 ):
1231 # When:
1231 # When:
1232 # - this is a subrepo push
1232 # - this is a subrepo push
1233 # - and the remote supports phases
1233 # - and the remote supports phases
1234 # - and no changeset was pushed
1234 # - and no changeset was pushed
1235 # - and remote is publishing
1235 # - and remote is publishing
1236 # We may be in issue 3871 case!
1236 # We may be in issue 3871 case!
1237 # We drop the phase synchronisation that would normally be done as a
1237 # We drop the phase synchronisation that would normally be done as a
1238 # courtesy, in order to publish changesets that are possibly still
1238 # courtesy, in order to publish changesets that are possibly still
1239 # draft locally but present on the remote.
1239 # draft locally but present on the remote.
1240 remotephases = {b'publishing': b'True'}
1240 remotephases = {b'publishing': b'True'}
1241 if not remotephases: # old server or public only reply from non-publishing
1241 if not remotephases: # old server or public only reply from non-publishing
1242 _localphasemove(pushop, cheads)
1242 _localphasemove(pushop, cheads)
1243 # don't push any phase data as there is nothing to push
1243 # don't push any phase data as there is nothing to push
1244 else:
1244 else:
1245 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1245 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1246 pheads, droots = ana
1246 pheads, droots = ana
1247 ### Apply remote phase on local
1247 ### Apply remote phase on local
1248 if remotephases.get(b'publishing', False):
1248 if remotephases.get(b'publishing', False):
1249 _localphasemove(pushop, cheads)
1249 _localphasemove(pushop, cheads)
1250 else: # publish = False
1250 else: # publish = False
1251 _localphasemove(pushop, pheads)
1251 _localphasemove(pushop, pheads)
1252 _localphasemove(pushop, cheads, phases.draft)
1252 _localphasemove(pushop, cheads, phases.draft)
1253 ### Apply local phase on remote
1253 ### Apply local phase on remote
1254
1254
1255 if pushop.cgresult:
1255 if pushop.cgresult:
1256 if b'phases' in pushop.stepsdone:
1256 if b'phases' in pushop.stepsdone:
1257 # phases already pushed though bundle2
1257 # phases already pushed though bundle2
1258 return
1258 return
1259 outdated = pushop.outdatedphases
1259 outdated = pushop.outdatedphases
1260 else:
1260 else:
1261 outdated = pushop.fallbackoutdatedphases
1261 outdated = pushop.fallbackoutdatedphases
1262
1262
1263 pushop.stepsdone.add(b'phases')
1263 pushop.stepsdone.add(b'phases')
1264
1264
1265 # filter heads already turned public by the push
1265 # filter heads already turned public by the push
1266 outdated = [c for c in outdated if c.node() not in pheads]
1266 outdated = [c for c in outdated if c.node() not in pheads]
1267 # fallback to independent pushkey command
1267 # fallback to independent pushkey command
1268 for newremotehead in outdated:
1268 for newremotehead in outdated:
1269 with pushop.remote.commandexecutor() as e:
1269 with pushop.remote.commandexecutor() as e:
1270 r = e.callcommand(
1270 r = e.callcommand(
1271 b'pushkey',
1271 b'pushkey',
1272 {
1272 {
1273 b'namespace': b'phases',
1273 b'namespace': b'phases',
1274 b'key': newremotehead.hex(),
1274 b'key': newremotehead.hex(),
1275 b'old': b'%d' % phases.draft,
1275 b'old': b'%d' % phases.draft,
1276 b'new': b'%d' % phases.public,
1276 b'new': b'%d' % phases.public,
1277 },
1277 },
1278 ).result()
1278 ).result()
1279
1279
1280 if not r:
1280 if not r:
1281 pushop.ui.warn(
1281 pushop.ui.warn(
1282 _(b'updating %s to public failed!\n') % newremotehead
1282 _(b'updating %s to public failed!\n') % newremotehead
1283 )
1283 )
1284
1284
1285
1285
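# Editor's note (illustration, not part of exchange.py): the pushkey
# fallback above amounts to a manual phase move such as
#
#     hg debugpushkey <remote-url> phases <node-hex> 1 0
#
# i.e. asking the remote to move <node> from draft (1) to public (0).
# The <remote-url> and <node-hex> placeholders are hypothetical.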
1286 def _localphasemove(pushop, nodes, phase=phases.public):
1286 def _localphasemove(pushop, nodes, phase=phases.public):
1287 """move <nodes> to <phase> in the local source repo"""
1287 """move <nodes> to <phase> in the local source repo"""
1288 if pushop.trmanager:
1288 if pushop.trmanager:
1289 phases.advanceboundary(
1289 phases.advanceboundary(
1290 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1290 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1291 )
1291 )
1292 else:
1292 else:
1293 # repo is not locked, do not change any phases!
1293 # repo is not locked, do not change any phases!
1294 # Informs the user that phases should have been moved when
1294 # Informs the user that phases should have been moved when
1295 # applicable.
1295 # applicable.
1296 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1296 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1297 phasestr = phases.phasenames[phase]
1297 phasestr = phases.phasenames[phase]
1298 if actualmoves:
1298 if actualmoves:
1299 pushop.ui.status(
1299 pushop.ui.status(
1300 _(
1300 _(
1301 b'cannot lock source repo, skipping '
1301 b'cannot lock source repo, skipping '
1302 b'local %s phase update\n'
1302 b'local %s phase update\n'
1303 )
1303 )
1304 % phasestr
1304 % phasestr
1305 )
1305 )
1306
1306
1307
1307
1308 def _pushobsolete(pushop):
1308 def _pushobsolete(pushop):
1309 """utility function to push obsolete markers to a remote"""
1309 """utility function to push obsolete markers to a remote"""
1310 if b'obsmarkers' in pushop.stepsdone:
1310 if b'obsmarkers' in pushop.stepsdone:
1311 return
1311 return
1312 repo = pushop.repo
1312 repo = pushop.repo
1313 remote = pushop.remote
1313 remote = pushop.remote
1314 pushop.stepsdone.add(b'obsmarkers')
1314 pushop.stepsdone.add(b'obsmarkers')
1315 if pushop.outobsmarkers:
1315 if pushop.outobsmarkers:
1316 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1316 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1317 rslts = []
1317 rslts = []
1318 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1318 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1319 remotedata = obsolete._pushkeyescape(markers)
1319 remotedata = obsolete._pushkeyescape(markers)
1320 for key in sorted(remotedata, reverse=True):
1320 for key in sorted(remotedata, reverse=True):
1321 # reverse sort to ensure we end with dump0
1321 # reverse sort to ensure we end with dump0
1322 data = remotedata[key]
1322 data = remotedata[key]
1323 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1323 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1324 if [r for r in rslts if not r]:
1324 if [r for r in rslts if not r]:
1325 msg = _(b'failed to push some obsolete markers!\n')
1325 msg = _(b'failed to push some obsolete markers!\n')
1326 repo.ui.warn(msg)
1326 repo.ui.warn(msg)
1327
1327
1328
1328
1329 def _pushbookmark(pushop):
1329 def _pushbookmark(pushop):
1330 """Update bookmark position on remote"""
1330 """Update bookmark position on remote"""
1331 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1331 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1332 return
1332 return
1333 pushop.stepsdone.add(b'bookmarks')
1333 pushop.stepsdone.add(b'bookmarks')
1334 ui = pushop.ui
1334 ui = pushop.ui
1335 remote = pushop.remote
1335 remote = pushop.remote
1336
1336
1337 for b, old, new in pushop.outbookmarks:
1337 for b, old, new in pushop.outbookmarks:
1338 action = b'update'
1338 action = b'update'
1339 if not old:
1339 if not old:
1340 action = b'export'
1340 action = b'export'
1341 elif not new:
1341 elif not new:
1342 action = b'delete'
1342 action = b'delete'
1343
1343
1344 with remote.commandexecutor() as e:
1344 with remote.commandexecutor() as e:
1345 r = e.callcommand(
1345 r = e.callcommand(
1346 b'pushkey',
1346 b'pushkey',
1347 {
1347 {
1348 b'namespace': b'bookmarks',
1348 b'namespace': b'bookmarks',
1349 b'key': b,
1349 b'key': b,
1350 b'old': hex(old),
1350 b'old': hex(old),
1351 b'new': hex(new),
1351 b'new': hex(new),
1352 },
1352 },
1353 ).result()
1353 ).result()
1354
1354
1355 if r:
1355 if r:
1356 ui.status(bookmsgmap[action][0] % b)
1356 ui.status(bookmsgmap[action][0] % b)
1357 else:
1357 else:
1358 ui.warn(bookmsgmap[action][1] % b)
1358 ui.warn(bookmsgmap[action][1] % b)
1359 # discovery can have set the value from an invalid entry
1359 # discovery can have set the value from an invalid entry
1360 if pushop.bkresult is not None:
1360 if pushop.bkresult is not None:
1361 pushop.bkresult = 1
1361 pushop.bkresult = 1
1362
1362
1363
1363
1364 class pulloperation(object):
1364 class pulloperation(object):
1365 """A object that represent a single pull operation
1365 """A object that represent a single pull operation
1366
1366
1367 Its purpose is to carry pull-related state and very common operations.
1367 Its purpose is to carry pull-related state and very common operations.
1368
1368
1369 A new one should be created at the beginning of each pull and discarded
1369 A new one should be created at the beginning of each pull and discarded
1370 afterward.
1370 afterward.
1371 """
1371 """
1372
1372
1373 def __init__(
1373 def __init__(
1374 self,
1374 self,
1375 repo,
1375 repo,
1376 remote,
1376 remote,
1377 heads=None,
1377 heads=None,
1378 force=False,
1378 force=False,
1379 bookmarks=(),
1379 bookmarks=(),
1380 remotebookmarks=None,
1380 remotebookmarks=None,
1381 streamclonerequested=None,
1381 streamclonerequested=None,
1382 includepats=None,
1382 includepats=None,
1383 excludepats=None,
1383 excludepats=None,
1384 depth=None,
1384 depth=None,
1385 ):
1385 ):
1386 # repo we pull into
1386 # repo we pull into
1387 self.repo = repo
1387 self.repo = repo
1388 # repo we pull from
1388 # repo we pull from
1389 self.remote = remote
1389 self.remote = remote
1390 # revisions we try to pull (None means "all")
1390 # revisions we try to pull (None means "all")
1391 self.heads = heads
1391 self.heads = heads
1392 # bookmarks pulled explicitly
1392 # bookmarks pulled explicitly
1393 self.explicitbookmarks = [
1393 self.explicitbookmarks = [
1394 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1394 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1395 ]
1395 ]
1396 # do we force pull?
1396 # do we force pull?
1397 self.force = force
1397 self.force = force
1398 # whether a streaming clone was requested
1398 # whether a streaming clone was requested
1399 self.streamclonerequested = streamclonerequested
1399 self.streamclonerequested = streamclonerequested
1400 # transaction manager
1400 # transaction manager
1401 self.trmanager = None
1401 self.trmanager = None
1402 # set of changesets common to local and remote before the pull
1402 # set of changesets common to local and remote before the pull
1403 self.common = None
1403 self.common = None
1404 # set of pulled heads
1404 # set of pulled heads
1405 self.rheads = None
1405 self.rheads = None
1406 # list of missing changesets to fetch remotely
1406 # list of missing changesets to fetch remotely
1407 self.fetch = None
1407 self.fetch = None
1408 # remote bookmarks data
1408 # remote bookmarks data
1409 self.remotebookmarks = remotebookmarks
1409 self.remotebookmarks = remotebookmarks
1410 # result of changegroup pulling (used as return code by pull)
1410 # result of changegroup pulling (used as return code by pull)
1411 self.cgresult = None
1411 self.cgresult = None
1412 # list of steps already done
1412 # list of steps already done
1413 self.stepsdone = set()
1413 self.stepsdone = set()
1414 # Whether we attempted a clone from pre-generated bundles.
1414 # Whether we attempted a clone from pre-generated bundles.
1415 self.clonebundleattempted = False
1415 self.clonebundleattempted = False
1416 # Set of file patterns to include.
1416 # Set of file patterns to include.
1417 self.includepats = includepats
1417 self.includepats = includepats
1418 # Set of file patterns to exclude.
1418 # Set of file patterns to exclude.
1419 self.excludepats = excludepats
1419 self.excludepats = excludepats
1420 # Number of ancestor changesets to pull from each pulled head.
1420 # Number of ancestor changesets to pull from each pulled head.
1421 self.depth = depth
1421 self.depth = depth
1422
1422
1423 @util.propertycache
1423 @util.propertycache
1424 def pulledsubset(self):
1424 def pulledsubset(self):
1425 """heads of the set of changeset target by the pull"""
1425 """heads of the set of changeset target by the pull"""
1426 # compute target subset
1426 # compute target subset
1427 if self.heads is None:
1427 if self.heads is None:
1428 # We pulled everything possible
1428 # We pulled everything possible
1429 # sync on everything common
1429 # sync on everything common
1430 c = set(self.common)
1430 c = set(self.common)
1431 ret = list(self.common)
1431 ret = list(self.common)
1432 for n in self.rheads:
1432 for n in self.rheads:
1433 if n not in c:
1433 if n not in c:
1434 ret.append(n)
1434 ret.append(n)
1435 return ret
1435 return ret
1436 else:
1436 else:
1437 # We pulled a specific subset
1437 # We pulled a specific subset
1438 # sync on this subset
1438 # sync on this subset
1439 return self.heads
1439 return self.heads
1440
1440
1441 @util.propertycache
1441 @util.propertycache
1442 def canusebundle2(self):
1442 def canusebundle2(self):
1443 return not _forcebundle1(self)
1443 return not _forcebundle1(self)
1444
1444
1445 @util.propertycache
1445 @util.propertycache
1446 def remotebundle2caps(self):
1446 def remotebundle2caps(self):
1447 return bundle2.bundle2caps(self.remote)
1447 return bundle2.bundle2caps(self.remote)
1448
1448
1449 def gettransaction(self):
1449 def gettransaction(self):
1450 # deprecated; talk to trmanager directly
1450 # deprecated; talk to trmanager directly
1451 return self.trmanager.transaction()
1451 return self.trmanager.transaction()
1452
1452
1453
1453
1454 class transactionmanager(util.transactional):
1454 class transactionmanager(util.transactional):
1455 """An object to manage the life cycle of a transaction
1455 """An object to manage the life cycle of a transaction
1456
1456
1457 It creates the transaction on demand and calls the appropriate hooks when
1457 It creates the transaction on demand and calls the appropriate hooks when
1458 closing the transaction."""
1458 closing the transaction."""
1459
1459
1460 def __init__(self, repo, source, url):
1460 def __init__(self, repo, source, url):
1461 self.repo = repo
1461 self.repo = repo
1462 self.source = source
1462 self.source = source
1463 self.url = url
1463 self.url = url
1464 self._tr = None
1464 self._tr = None
1465
1465
1466 def transaction(self):
1466 def transaction(self):
1467 """Return an open transaction object, constructing if necessary"""
1467 """Return an open transaction object, constructing if necessary"""
1468 if not self._tr:
1468 if not self._tr:
1469 trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url))
1469 trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url))
1470 self._tr = self.repo.transaction(trname)
1470 self._tr = self.repo.transaction(trname)
1471 self._tr.hookargs[b'source'] = self.source
1471 self._tr.hookargs[b'source'] = self.source
1472 self._tr.hookargs[b'url'] = self.url
1472 self._tr.hookargs[b'url'] = self.url
1473 return self._tr
1473 return self._tr
1474
1474
1475 def close(self):
1475 def close(self):
1476 """close transaction if created"""
1476 """close transaction if created"""
1477 if self._tr is not None:
1477 if self._tr is not None:
1478 self._tr.close()
1478 self._tr.close()
1479
1479
1480 def release(self):
1480 def release(self):
1481 """release transaction if created"""
1481 """release transaction if created"""
1482 if self._tr is not None:
1482 if self._tr is not None:
1483 self._tr.release()
1483 self._tr.release()
1484
1484
1485
1485
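# Editor's illustration (not part of exchange.py): transactionmanager is a
# util.transactional, so it is normally driven as a context manager, and
# the underlying repo transaction is only opened the first time
# .transaction() is called (see pull() below). A hypothetical caller:
#
#     def apply_incoming(repo, remote, apply_cb):
#         trmanager = transactionmanager(repo, b'pull', remote.url())
#         with repo.lock(), trmanager:
#             tr = trmanager.transaction()  # lazily opens the transaction
#             apply_cb(tr)                  # write incoming data under tr
#         # on exit, close() runs on success and release() always runs,
#         # via util.transactional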
1486 def listkeys(remote, namespace):
1486 def listkeys(remote, namespace):
1487 with remote.commandexecutor() as e:
1487 with remote.commandexecutor() as e:
1488 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1488 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1489
1489
1490
1490
1491 def _fullpullbundle2(repo, pullop):
1491 def _fullpullbundle2(repo, pullop):
1492 # The server may send a partial reply, i.e. when inlining
1492 # The server may send a partial reply, i.e. when inlining
1493 # pre-computed bundles. In that case, update the common
1493 # pre-computed bundles. In that case, update the common
1494 # set based on the results and pull another bundle.
1494 # set based on the results and pull another bundle.
1495 #
1495 #
1496 # There are two indicators that the process is finished:
1496 # There are two indicators that the process is finished:
1497 # - no changeset has been added, or
1497 # - no changeset has been added, or
1498 # - all remote heads are known locally.
1498 # - all remote heads are known locally.
1499 # The head check must use the unfiltered view as obsoletion
1499 # The head check must use the unfiltered view as obsoletion
1500 # markers can hide heads.
1500 # markers can hide heads.
1501 unfi = repo.unfiltered()
1501 unfi = repo.unfiltered()
1502 unficl = unfi.changelog
1502 unficl = unfi.changelog
1503
1503
1504 def headsofdiff(h1, h2):
1504 def headsofdiff(h1, h2):
1505 """Returns heads(h1 % h2)"""
1505 """Returns heads(h1 % h2)"""
1506 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1506 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1507 return {ctx.node() for ctx in res}
1507 return {ctx.node() for ctx in res}
1508
1508
1509 def headsofunion(h1, h2):
1509 def headsofunion(h1, h2):
1510 """Returns heads((h1 + h2) - null)"""
1510 """Returns heads((h1 + h2) - null)"""
1511 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1511 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1512 return {ctx.node() for ctx in res}
1512 return {ctx.node() for ctx in res}
1513
1513
1514 while True:
1514 while True:
1515 old_heads = unficl.heads()
1515 old_heads = unficl.heads()
1516 clstart = len(unficl)
1516 clstart = len(unficl)
1517 _pullbundle2(pullop)
1517 _pullbundle2(pullop)
1518 if requirements.NARROW_REQUIREMENT in repo.requirements:
1518 if requirements.NARROW_REQUIREMENT in repo.requirements:
1519 # XXX narrow clones filter the heads on the server side during
1519 # XXX narrow clones filter the heads on the server side during
1520 # XXX getbundle and result in partial replies as well.
1520 # XXX getbundle and result in partial replies as well.
1521 # XXX Disable pull bundles in this case as band aid to avoid
1521 # XXX Disable pull bundles in this case as band aid to avoid
1522 # XXX extra round trips.
1522 # XXX extra round trips.
1523 break
1523 break
1524 if clstart == len(unficl):
1524 if clstart == len(unficl):
1525 break
1525 break
1526 if all(unficl.hasnode(n) for n in pullop.rheads):
1526 if all(unficl.hasnode(n) for n in pullop.rheads):
1527 break
1527 break
1528 new_heads = headsofdiff(unficl.heads(), old_heads)
1528 new_heads = headsofdiff(unficl.heads(), old_heads)
1529 pullop.common = headsofunion(new_heads, pullop.common)
1529 pullop.common = headsofunion(new_heads, pullop.common)
1530 pullop.rheads = set(pullop.rheads) - pullop.common
1530 pullop.rheads = set(pullop.rheads) - pullop.common
1531
1531
1532
1532
1533 def add_confirm_callback(repo, pullop):
1533 def add_confirm_callback(repo, pullop):
1534 """adds a finalize callback to transaction which can be used to show stats
1534 """adds a finalize callback to transaction which can be used to show stats
1535 to user and confirm the pull before committing transaction"""
1535 to user and confirm the pull before committing transaction"""
1536
1536
1537 tr = pullop.trmanager.transaction()
1537 tr = pullop.trmanager.transaction()
1538 scmutil.registersummarycallback(
1538 scmutil.registersummarycallback(
1539 repo, tr, txnname=b'pull', as_validator=True
1539 repo, tr, txnname=b'pull', as_validator=True
1540 )
1540 )
1541 reporef = weakref.ref(repo.unfiltered())
1541 reporef = weakref.ref(repo.unfiltered())
1542
1542
1543 def prompt(tr):
1543 def prompt(tr):
1544 repo = reporef()
1544 repo = reporef()
1545 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1545 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1546 if repo.ui.promptchoice(cm):
1546 if repo.ui.promptchoice(cm):
1547 raise error.Abort(b"user aborted")
1547 raise error.Abort(b"user aborted")
1548
1548
1549 tr.addvalidator(b'900-pull-prompt', prompt)
1549 tr.addvalidator(b'900-pull-prompt', prompt)
1550
1550
1551
1551
1552 def pull(
1552 def pull(
1553 repo,
1553 repo,
1554 remote,
1554 remote,
1555 heads=None,
1555 heads=None,
1556 force=False,
1556 force=False,
1557 bookmarks=(),
1557 bookmarks=(),
1558 opargs=None,
1558 opargs=None,
1559 streamclonerequested=None,
1559 streamclonerequested=None,
1560 includepats=None,
1560 includepats=None,
1561 excludepats=None,
1561 excludepats=None,
1562 depth=None,
1562 depth=None,
1563 confirm=None,
1563 confirm=None,
1564 ):
1564 ):
1565 """Fetch repository data from a remote.
1565 """Fetch repository data from a remote.
1566
1566
1567 This is the main function used to retrieve data from a remote repository.
1567 This is the main function used to retrieve data from a remote repository.
1568
1568
1569 ``repo`` is the local repository to clone into.
1569 ``repo`` is the local repository to clone into.
1570 ``remote`` is a peer instance.
1570 ``remote`` is a peer instance.
1571 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1571 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1572 default) means to pull everything from the remote.
1572 default) means to pull everything from the remote.
1573 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1573 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1574 default, all remote bookmarks are pulled.
1574 default, all remote bookmarks are pulled.
1575 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1575 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1576 initialization.
1576 initialization.
1577 ``streamclonerequested`` is a boolean indicating whether a "streaming
1577 ``streamclonerequested`` is a boolean indicating whether a "streaming
1578 clone" is requested. A "streaming clone" is essentially a raw file copy
1578 clone" is requested. A "streaming clone" is essentially a raw file copy
1579 of revlogs from the server. This only works when the local repository is
1579 of revlogs from the server. This only works when the local repository is
1580 empty. The default value of ``None`` means to respect the server
1580 empty. The default value of ``None`` means to respect the server
1581 configuration for preferring stream clones.
1581 configuration for preferring stream clones.
1582 ``includepats`` and ``excludepats`` define explicit file patterns to
1582 ``includepats`` and ``excludepats`` define explicit file patterns to
1583 include and exclude in storage, respectively. If not defined, narrow
1583 include and exclude in storage, respectively. If not defined, narrow
1584 patterns from the repo instance are used, if available.
1584 patterns from the repo instance are used, if available.
1585 ``depth`` is an integer indicating the DAG depth of history we're
1585 ``depth`` is an integer indicating the DAG depth of history we're
1586 interested in. If defined, for each revision specified in ``heads``, we
1586 interested in. If defined, for each revision specified in ``heads``, we
1587 will fetch up to this many of its ancestors and data associated with them.
1587 will fetch up to this many of its ancestors and data associated with them.
1588 ``confirm`` is a boolean indicating whether the pull should be confirmed
1588 ``confirm`` is a boolean indicating whether the pull should be confirmed
1589 before committing the transaction. This overrides HGPLAIN.
1589 before committing the transaction. This overrides HGPLAIN.
1590
1590
1591 Returns the ``pulloperation`` created for this pull.
1591 Returns the ``pulloperation`` created for this pull.
1592 """
1592 """
1593 if opargs is None:
1593 if opargs is None:
1594 opargs = {}
1594 opargs = {}
1595
1595
1596 # We allow the narrow patterns to be passed in explicitly to provide more
1596 # We allow the narrow patterns to be passed in explicitly to provide more
1597 # flexibility for API consumers.
1597 # flexibility for API consumers.
1598 if includepats or excludepats:
1598 if includepats or excludepats:
1599 includepats = includepats or set()
1599 includepats = includepats or set()
1600 excludepats = excludepats or set()
1600 excludepats = excludepats or set()
1601 else:
1601 else:
1602 includepats, excludepats = repo.narrowpats
1602 includepats, excludepats = repo.narrowpats
1603
1603
1604 narrowspec.validatepatterns(includepats)
1604 narrowspec.validatepatterns(includepats)
1605 narrowspec.validatepatterns(excludepats)
1605 narrowspec.validatepatterns(excludepats)
1606
1606
1607 pullop = pulloperation(
1607 pullop = pulloperation(
1608 repo,
1608 repo,
1609 remote,
1609 remote,
1610 heads,
1610 heads,
1611 force,
1611 force,
1612 bookmarks=bookmarks,
1612 bookmarks=bookmarks,
1613 streamclonerequested=streamclonerequested,
1613 streamclonerequested=streamclonerequested,
1614 includepats=includepats,
1614 includepats=includepats,
1615 excludepats=excludepats,
1615 excludepats=excludepats,
1616 depth=depth,
1616 depth=depth,
1617 **pycompat.strkwargs(opargs)
1617 **pycompat.strkwargs(opargs)
1618 )
1618 )
1619
1619
1620 peerlocal = pullop.remote.local()
1620 peerlocal = pullop.remote.local()
1621 if peerlocal:
1621 if peerlocal:
1622 missing = set(peerlocal.requirements) - pullop.repo.supported
1622 missing = set(peerlocal.requirements) - pullop.repo.supported
1623 if missing:
1623 if missing:
1624 msg = _(
1624 msg = _(
1625 b"required features are not"
1625 b"required features are not"
1626 b" supported in the destination:"
1626 b" supported in the destination:"
1627 b" %s"
1627 b" %s"
1628 ) % (b', '.join(sorted(missing)))
1628 ) % (b', '.join(sorted(missing)))
1629 raise error.Abort(msg)
1629 raise error.Abort(msg)
1630
1630
1631 for category in repo._wanted_sidedata:
1631 for category in repo._wanted_sidedata:
1632 # Check that a computer is registered for that category for at least
1632 # Check that a computer is registered for that category for at least
1633 # one revlog kind.
1633 # one revlog kind.
1634 for kind, computers in repo._sidedata_computers.items():
1634 for kind, computers in repo._sidedata_computers.items():
1635 if computers.get(category):
1635 if computers.get(category):
1636 break
1636 break
1637 else:
1637 else:
1638 # This should never happen since repos are supposed to be able to
1638 # This should never happen since repos are supposed to be able to
1639 # generate the sidedata they require.
1639 # generate the sidedata they require.
1640 raise error.ProgrammingError(
1640 raise error.ProgrammingError(
1641 _(
1641 _(
1642 b'sidedata category requested by local side without local'
1642 b'sidedata category requested by local side without local'
1643 b"support: '%s'"
1643 b"support: '%s'"
1644 )
1644 )
1645 % pycompat.bytestr(category)
1645 % pycompat.bytestr(category)
1646 )
1646 )
1647
1647
1648 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1648 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1649 wlock = util.nullcontextmanager()
1649 wlock = util.nullcontextmanager()
1650 if not bookmod.bookmarksinstore(repo):
1650 if not bookmod.bookmarksinstore(repo):
1651 wlock = repo.wlock()
1651 wlock = repo.wlock()
1652 with wlock, repo.lock(), pullop.trmanager:
1652 with wlock, repo.lock(), pullop.trmanager:
1653 if confirm or (
1653 if confirm or (
1654 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1654 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1655 ):
1655 ):
1656 add_confirm_callback(repo, pullop)
1656 add_confirm_callback(repo, pullop)
1657
1657
1658 # Use the modern wire protocol, if available.
1658 # Use the modern wire protocol, if available.
1659 if remote.capable(b'command-changesetdata'):
1659 if remote.capable(b'command-changesetdata'):
1660 exchangev2.pull(pullop)
1660 exchangev2.pull(pullop)
1661 else:
1661 else:
1662 # This should ideally be in _pullbundle2(). However, it needs to run
1662 # This should ideally be in _pullbundle2(). However, it needs to run
1663 # before discovery to avoid extra work.
1663 # before discovery to avoid extra work.
1664 _maybeapplyclonebundle(pullop)
1664 _maybeapplyclonebundle(pullop)
1665 streamclone.maybeperformlegacystreamclone(pullop)
1665 streamclone.maybeperformlegacystreamclone(pullop)
1666 _pulldiscovery(pullop)
1666 _pulldiscovery(pullop)
1667 if pullop.canusebundle2:
1667 if pullop.canusebundle2:
1668 _fullpullbundle2(repo, pullop)
1668 _fullpullbundle2(repo, pullop)
1669 _pullchangeset(pullop)
1669 _pullchangeset(pullop)
1670 _pullphase(pullop)
1670 _pullphase(pullop)
1671 _pullbookmarks(pullop)
1671 _pullbookmarks(pullop)
1672 _pullobsolete(pullop)
1672 _pullobsolete(pullop)
1673
1673
1674 # storing remotenames
1674 # storing remotenames
1675 if repo.ui.configbool(b'experimental', b'remotenames'):
1675 if repo.ui.configbool(b'experimental', b'remotenames'):
1676 logexchange.pullremotenames(repo, remote)
1676 logexchange.pullremotenames(repo, remote)
1677
1677
1678 return pullop
1678 return pullop
1679
1679
1680
1680
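# Editor's illustration (not part of exchange.py): a minimal caller of
# pull() above, assuming the standard hg/ui helpers; the repository path
# and URL arguments are hypothetical placeholders.
#
#     from mercurial import hg, ui as uimod
#     from mercurial import exchange
#
#     def fetch_everything(local_path, remote_url):
#         ui = uimod.ui.load()
#         repo = hg.repository(ui, local_path)   # e.g. b'/path/to/repo'
#         remote = hg.peer(ui, {}, remote_url)   # e.g. b'https://example.com/repo'
#         pullop = exchange.pull(repo, remote)   # heads=None: pull everything
#         return pullop.cgresult                 # changegroup result code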
1681 # list of steps to perform discovery before pull
1681 # list of steps to perform discovery before pull
1682 pulldiscoveryorder = []
1682 pulldiscoveryorder = []
1683
1683
1684 # Mapping between step name and function
1684 # Mapping between step name and function
1685 #
1685 #
1686 # This exists to help extensions wrap steps if necessary
1686 # This exists to help extensions wrap steps if necessary
1687 pulldiscoverymapping = {}
1687 pulldiscoverymapping = {}
1688
1688
1689
1689
1690 def pulldiscovery(stepname):
1690 def pulldiscovery(stepname):
1691 """decorator for function performing discovery before pull
1691 """decorator for function performing discovery before pull
1692
1692
1693 The function is added to the step -> function mapping and appended to the
1693 The function is added to the step -> function mapping and appended to the
1694 list of steps. Beware that decorated functions will be added in order (this
1694 list of steps. Beware that decorated functions will be added in order (this
1695 may matter).
1695 may matter).
1696
1696
1697 You can only use this decorator for a new step; if you want to wrap a step
1697 You can only use this decorator for a new step; if you want to wrap a step
1698 from an extension, change the pulldiscovery dictionary directly."""
1698 from an extension, change the pulldiscovery dictionary directly."""
1699
1699
1700 def dec(func):
1700 def dec(func):
1701 assert stepname not in pulldiscoverymapping
1701 assert stepname not in pulldiscoverymapping
1702 pulldiscoverymapping[stepname] = func
1702 pulldiscoverymapping[stepname] = func
1703 pulldiscoveryorder.append(stepname)
1703 pulldiscoveryorder.append(stepname)
1704 return func
1704 return func
1705
1705
1706 return dec
1706 return dec
1707
1707
1708
1708
1709 def _pulldiscovery(pullop):
1709 def _pulldiscovery(pullop):
1710 """Run all discovery steps"""
1710 """Run all discovery steps"""
1711 for stepname in pulldiscoveryorder:
1711 for stepname in pulldiscoveryorder:
1712 step = pulldiscoverymapping[stepname]
1712 step = pulldiscoverymapping[stepname]
1713 step(pullop)
1713 step(pullop)
1714
1714
1715
1715
1716 @pulldiscovery(b'b1:bookmarks')
1716 @pulldiscovery(b'b1:bookmarks')
1717 def _pullbookmarkbundle1(pullop):
1717 def _pullbookmarkbundle1(pullop):
1718 """fetch bookmark data in bundle1 case
1718 """fetch bookmark data in bundle1 case
1719
1719
1720 If not using bundle2, we have to fetch bookmarks before changeset
1720 If not using bundle2, we have to fetch bookmarks before changeset
1721 discovery to reduce the chance and impact of race conditions."""
1721 discovery to reduce the chance and impact of race conditions."""
1722 if pullop.remotebookmarks is not None:
1722 if pullop.remotebookmarks is not None:
1723 return
1723 return
1724 if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
1724 if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
1725 # all known bundle2 servers now support listkeys, but let's be nice with
1725 # all known bundle2 servers now support listkeys, but let's be nice with
1726 # new implementations.
1726 # new implementations.
1727 return
1727 return
1728 books = listkeys(pullop.remote, b'bookmarks')
1728 books = listkeys(pullop.remote, b'bookmarks')
1729 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1729 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1730
1730
1731
1731
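# Editor's note (illustration, not part of exchange.py): listkeys() on the
# b'bookmarks' namespace returns a dict of bookmark name -> hex node id,
# roughly of the shape
#
#     {b'@': b'0123456789abcdef...'}   # value shortened, hypothetical
#
# and bookmod.unhexlifybookmarks() converts the values to binary nodes.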
1732 @pulldiscovery(b'changegroup')
1732 @pulldiscovery(b'changegroup')
1733 def _pulldiscoverychangegroup(pullop):
1733 def _pulldiscoverychangegroup(pullop):
1734 """discovery phase for the pull
1734 """discovery phase for the pull
1735
1735
1736 Currently this handles changeset discovery only; it will change to handle
1736 Currently this handles changeset discovery only; it will change to handle
1737 all discovery at some point."""
1737 all discovery at some point."""
1738 tmp = discovery.findcommonincoming(
1738 tmp = discovery.findcommonincoming(
1739 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1739 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1740 )
1740 )
1741 common, fetch, rheads = tmp
1741 common, fetch, rheads = tmp
1742 has_node = pullop.repo.unfiltered().changelog.index.has_node
1742 has_node = pullop.repo.unfiltered().changelog.index.has_node
1743 if fetch and rheads:
1743 if fetch and rheads:
1744 # If a remote head is filtered locally, put it back in common.
1744 # If a remote head is filtered locally, put it back in common.
1745 #
1745 #
1746 # This is a hackish solution to catch most of the "common but locally
1746 # This is a hackish solution to catch most of the "common but locally
1747 # hidden" situations. We do not perform discovery on the unfiltered
1747 # hidden" situations. We do not perform discovery on the unfiltered
1748 # repository because it ends up doing a pathological amount of round
1748 # repository because it ends up doing a pathological amount of round
1749 # trips for a huge amount of changesets we do not care about.
1749 # trips for a huge amount of changesets we do not care about.
1750 #
1750 #
1751 # If a set of such "common but filtered" changesets exists on the server
1751 # If a set of such "common but filtered" changesets exists on the server
1752 # but does not include a remote head, we will not be able to detect it.
1752 # but does not include a remote head, we will not be able to detect it.
1753 scommon = set(common)
1753 scommon = set(common)
1754 for n in rheads:
1754 for n in rheads:
1755 if has_node(n):
1755 if has_node(n):
1756 if n not in scommon:
1756 if n not in scommon:
1757 common.append(n)
1757 common.append(n)
1758 if set(rheads).issubset(set(common)):
1758 if set(rheads).issubset(set(common)):
1759 fetch = []
1759 fetch = []
1760 pullop.common = common
1760 pullop.common = common
1761 pullop.fetch = fetch
1761 pullop.fetch = fetch
1762 pullop.rheads = rheads
1762 pullop.rheads = rheads
1763
1763
1764
1764
1765 def _pullbundle2(pullop):
1765 def _pullbundle2(pullop):
1766 """pull data using bundle2
1766 """pull data using bundle2
1767
1767
1768 For now, the only supported data are changegroup."""
1768 For now, the only supported data are changegroup."""
1769 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1769 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1770
1770
1771 # make ui easier to access
1771 # make ui easier to access
1772 ui = pullop.repo.ui
1772 ui = pullop.repo.ui
1773
1773
1774 # At the moment we don't do stream clones over bundle2. If that is
1774 # At the moment we don't do stream clones over bundle2. If that is
1775 # implemented then here's where the check for that will go.
1775 # implemented then here's where the check for that will go.
1776 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1776 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1777
1777
1778 # declare pull perimeters
1778 # declare pull perimeters
1779 kwargs[b'common'] = pullop.common
1779 kwargs[b'common'] = pullop.common
1780 kwargs[b'heads'] = pullop.heads or pullop.rheads
1780 kwargs[b'heads'] = pullop.heads or pullop.rheads
1781
1781
1782 # check whether the server supports narrow, and if so add includepats and excludepats
1782 # check whether the server supports narrow, and if so add includepats and excludepats
1783 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1783 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1784 if servernarrow and pullop.includepats:
1784 if servernarrow and pullop.includepats:
1785 kwargs[b'includepats'] = pullop.includepats
1785 kwargs[b'includepats'] = pullop.includepats
1786 if servernarrow and pullop.excludepats:
1786 if servernarrow and pullop.excludepats:
1787 kwargs[b'excludepats'] = pullop.excludepats
1787 kwargs[b'excludepats'] = pullop.excludepats
1788
1788
1789 if streaming:
1789 if streaming:
1790 kwargs[b'cg'] = False
1790 kwargs[b'cg'] = False
1791 kwargs[b'stream'] = True
1791 kwargs[b'stream'] = True
1792 pullop.stepsdone.add(b'changegroup')
1792 pullop.stepsdone.add(b'changegroup')
1793 pullop.stepsdone.add(b'phases')
1793 pullop.stepsdone.add(b'phases')
1794
1794
1795 else:
1795 else:
1796 # pulling changegroup
1796 # pulling changegroup
1797 pullop.stepsdone.add(b'changegroup')
1797 pullop.stepsdone.add(b'changegroup')
1798
1798
1799 kwargs[b'cg'] = pullop.fetch
1799 kwargs[b'cg'] = pullop.fetch
1800
1800
1801 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1801 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1802 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1802 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1803 if not legacyphase and hasbinaryphase:
1803 if not legacyphase and hasbinaryphase:
1804 kwargs[b'phases'] = True
1804 kwargs[b'phases'] = True
1805 pullop.stepsdone.add(b'phases')
1805 pullop.stepsdone.add(b'phases')
1806
1806
1807 if b'listkeys' in pullop.remotebundle2caps:
1807 if b'listkeys' in pullop.remotebundle2caps:
1808 if b'phases' not in pullop.stepsdone:
1808 if b'phases' not in pullop.stepsdone:
1809 kwargs[b'listkeys'] = [b'phases']
1809 kwargs[b'listkeys'] = [b'phases']
1810
1810
1811 bookmarksrequested = False
1811 bookmarksrequested = False
1812 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1812 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1813 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1813 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1814
1814
1815 if pullop.remotebookmarks is not None:
1815 if pullop.remotebookmarks is not None:
1816 pullop.stepsdone.add(b'request-bookmarks')
1816 pullop.stepsdone.add(b'request-bookmarks')
1817
1817
1818 if (
1818 if (
1819 b'request-bookmarks' not in pullop.stepsdone
1819 b'request-bookmarks' not in pullop.stepsdone
1820 and pullop.remotebookmarks is None
1820 and pullop.remotebookmarks is None
1821 and not legacybookmark
1821 and not legacybookmark
1822 and hasbinarybook
1822 and hasbinarybook
1823 ):
1823 ):
1824 kwargs[b'bookmarks'] = True
1824 kwargs[b'bookmarks'] = True
1825 bookmarksrequested = True
1825 bookmarksrequested = True
1826
1826
1827 if b'listkeys' in pullop.remotebundle2caps:
1827 if b'listkeys' in pullop.remotebundle2caps:
1828 if b'request-bookmarks' not in pullop.stepsdone:
1828 if b'request-bookmarks' not in pullop.stepsdone:
1829 # make sure to always include bookmark data when migrating
1829 # make sure to always include bookmark data when migrating
1830 # `hg incoming --bundle` to use this function.
1830 # `hg incoming --bundle` to use this function.
1831 pullop.stepsdone.add(b'request-bookmarks')
1831 pullop.stepsdone.add(b'request-bookmarks')
1832 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1832 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1833
1833
1834 # If this is a full pull / clone and the server supports the clone bundles
1834 # If this is a full pull / clone and the server supports the clone bundles
1835 # feature, tell the server whether we attempted a clone bundle. The
1835 # feature, tell the server whether we attempted a clone bundle. The
1836 # presence of this flag indicates the client supports clone bundles. This
1836 # presence of this flag indicates the client supports clone bundles. This
1837 # will enable the server to treat clients that support clone bundles
1837 # will enable the server to treat clients that support clone bundles
1838 # differently from those that don't.
1838 # differently from those that don't.
1839 if (
1839 if (
1840 pullop.remote.capable(b'clonebundles')
1840 pullop.remote.capable(b'clonebundles')
1841 and pullop.heads is None
1841 and pullop.heads is None
1842 and list(pullop.common) == [nullid]
1842 and list(pullop.common) == [nullid]
1843 ):
1843 ):
1844 kwargs[b'cbattempted'] = pullop.clonebundleattempted
1844 kwargs[b'cbattempted'] = pullop.clonebundleattempted
1845
1845
1846 if streaming:
1846 if streaming:
1847 pullop.repo.ui.status(_(b'streaming all changes\n'))
1847 pullop.repo.ui.status(_(b'streaming all changes\n'))
1848 elif not pullop.fetch:
1848 elif not pullop.fetch:
1849 pullop.repo.ui.status(_(b"no changes found\n"))
1849 pullop.repo.ui.status(_(b"no changes found\n"))
1850 pullop.cgresult = 0
1850 pullop.cgresult = 0
1851 else:
1851 else:
1852 if pullop.heads is None and list(pullop.common) == [nullid]:
1852 if pullop.heads is None and list(pullop.common) == [nullid]:
1853 pullop.repo.ui.status(_(b"requesting all changes\n"))
1853 pullop.repo.ui.status(_(b"requesting all changes\n"))
1854 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1854 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1855 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1855 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1856 if obsolete.commonversion(remoteversions) is not None:
1856 if obsolete.commonversion(remoteversions) is not None:
1857 kwargs[b'obsmarkers'] = True
1857 kwargs[b'obsmarkers'] = True
1858 pullop.stepsdone.add(b'obsmarkers')
1858 pullop.stepsdone.add(b'obsmarkers')
1859 _pullbundle2extraprepare(pullop, kwargs)
1859 _pullbundle2extraprepare(pullop, kwargs)
1860
1860
1861 remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote)
1861 remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote)
1862 if remote_sidedata:
1862 if remote_sidedata:
1863 kwargs[b'remote_sidedata'] = remote_sidedata
1863 kwargs[b'remote_sidedata'] = remote_sidedata
1864
1864
1865 with pullop.remote.commandexecutor() as e:
1865 with pullop.remote.commandexecutor() as e:
1866 args = dict(kwargs)
1866 args = dict(kwargs)
1867 args[b'source'] = b'pull'
1867 args[b'source'] = b'pull'
1868 bundle = e.callcommand(b'getbundle', args).result()
1868 bundle = e.callcommand(b'getbundle', args).result()
1869
1869
1870 try:
1870 try:
1871 op = bundle2.bundleoperation(
1871 op = bundle2.bundleoperation(
1872 pullop.repo, pullop.gettransaction, source=b'pull'
1872 pullop.repo, pullop.gettransaction, source=b'pull'
1873 )
1873 )
1874 op.modes[b'bookmarks'] = b'records'
1874 op.modes[b'bookmarks'] = b'records'
1875 bundle2.processbundle(pullop.repo, bundle, op=op)
1875 bundle2.processbundle(pullop.repo, bundle, op=op)
1876 except bundle2.AbortFromPart as exc:
1876 except bundle2.AbortFromPart as exc:
1877 pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc)
1877 pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc)
1878 raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
1878 raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint)
1879 except error.BundleValueError as exc:
1879 except error.BundleValueError as exc:
1880 raise error.Abort(_(b'missing support for %s') % exc)
1880 raise error.RemoteError(_(b'missing support for %s') % exc)
1881
1881
1882 if pullop.fetch:
1882 if pullop.fetch:
1883 pullop.cgresult = bundle2.combinechangegroupresults(op)
1883 pullop.cgresult = bundle2.combinechangegroupresults(op)
1884
1884
1885 # processing phase changes
1885 # processing phase changes
1886 for namespace, value in op.records[b'listkeys']:
1886 for namespace, value in op.records[b'listkeys']:
1887 if namespace == b'phases':
1887 if namespace == b'phases':
1888 _pullapplyphases(pullop, value)
1888 _pullapplyphases(pullop, value)
1889
1889
1890 # processing bookmark update
1890 # processing bookmark update
1891 if bookmarksrequested:
1891 if bookmarksrequested:
1892 books = {}
1892 books = {}
1893 for record in op.records[b'bookmarks']:
1893 for record in op.records[b'bookmarks']:
1894 books[record[b'bookmark']] = record[b"node"]
1894 books[record[b'bookmark']] = record[b"node"]
1895 pullop.remotebookmarks = books
1895 pullop.remotebookmarks = books
1896 else:
1896 else:
1897 for namespace, value in op.records[b'listkeys']:
1897 for namespace, value in op.records[b'listkeys']:
1898 if namespace == b'bookmarks':
1898 if namespace == b'bookmarks':
1899 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1899 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1900
1900
1901 # bookmark data were either already there or pulled in the bundle
1901 # bookmark data were either already there or pulled in the bundle
1902 if pullop.remotebookmarks is not None:
1902 if pullop.remotebookmarks is not None:
1903 _pullbookmarks(pullop)
1903 _pullbookmarks(pullop)
1904
1904
1905
1905
1906 def _pullbundle2extraprepare(pullop, kwargs):
1906 def _pullbundle2extraprepare(pullop, kwargs):
1907 """hook function so that extensions can extend the getbundle call"""
1907 """hook function so that extensions can extend the getbundle call"""
1908
1908
1909
1909
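# --- Editor's illustrative sketch (not part of this changeset) ---
# An extension can extend the getbundle call by wrapping the hook above from
# its own module; the extension itself and the argument name b'myextarg' are
# hypothetical, only the wrapfunction pattern is shown.
from mercurial import exchange, extensions

def _extraprepare(orig, pullop, kwargs):
    # ask the server for a hypothetical extra piece of data
    kwargs[b'myextarg'] = True
    return orig(pullop, kwargs)

def extsetup(ui):
    extensions.wrapfunction(exchange, '_pullbundle2extraprepare', _extraprepare)
# --- end of sketch ---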
1910 def _pullchangeset(pullop):
1910 def _pullchangeset(pullop):
1911 """pull changeset from unbundle into the local repo"""
1911 """pull changeset from unbundle into the local repo"""
1912 # We delay opening the transaction as late as possible so we
1912 # We delay opening the transaction as late as possible so we
1913 # don't open a transaction for nothing and break a future useful
1913 # don't open a transaction for nothing and break a future useful
1914 # rollback call.
1914 # rollback call.
1915 if b'changegroup' in pullop.stepsdone:
1915 if b'changegroup' in pullop.stepsdone:
1916 return
1916 return
1917 pullop.stepsdone.add(b'changegroup')
1917 pullop.stepsdone.add(b'changegroup')
1918 if not pullop.fetch:
1918 if not pullop.fetch:
1919 pullop.repo.ui.status(_(b"no changes found\n"))
1919 pullop.repo.ui.status(_(b"no changes found\n"))
1920 pullop.cgresult = 0
1920 pullop.cgresult = 0
1921 return
1921 return
1922 tr = pullop.gettransaction()
1922 tr = pullop.gettransaction()
1923 if pullop.heads is None and list(pullop.common) == [nullid]:
1923 if pullop.heads is None and list(pullop.common) == [nullid]:
1924 pullop.repo.ui.status(_(b"requesting all changes\n"))
1924 pullop.repo.ui.status(_(b"requesting all changes\n"))
1925 elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
1925 elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
1926 # issue1320, avoid a race if remote changed after discovery
1926 # issue1320, avoid a race if remote changed after discovery
1927 pullop.heads = pullop.rheads
1927 pullop.heads = pullop.rheads
1928
1928
1929 if pullop.remote.capable(b'getbundle'):
1929 if pullop.remote.capable(b'getbundle'):
1930 # TODO: get bundlecaps from remote
1930 # TODO: get bundlecaps from remote
1931 cg = pullop.remote.getbundle(
1931 cg = pullop.remote.getbundle(
1932 b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
1932 b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
1933 )
1933 )
1934 elif pullop.heads is None:
1934 elif pullop.heads is None:
1935 with pullop.remote.commandexecutor() as e:
1935 with pullop.remote.commandexecutor() as e:
1936 cg = e.callcommand(
1936 cg = e.callcommand(
1937 b'changegroup',
1937 b'changegroup',
1938 {
1938 {
1939 b'nodes': pullop.fetch,
1939 b'nodes': pullop.fetch,
1940 b'source': b'pull',
1940 b'source': b'pull',
1941 },
1941 },
1942 ).result()
1942 ).result()
1943
1943
1944 elif not pullop.remote.capable(b'changegroupsubset'):
1944 elif not pullop.remote.capable(b'changegroupsubset'):
1945 raise error.Abort(
1945 raise error.Abort(
1946 _(
1946 _(
1947 b"partial pull cannot be done because "
1947 b"partial pull cannot be done because "
1948 b"other repository doesn't support "
1948 b"other repository doesn't support "
1949 b"changegroupsubset."
1949 b"changegroupsubset."
1950 )
1950 )
1951 )
1951 )
1952 else:
1952 else:
1953 with pullop.remote.commandexecutor() as e:
1953 with pullop.remote.commandexecutor() as e:
1954 cg = e.callcommand(
1954 cg = e.callcommand(
1955 b'changegroupsubset',
1955 b'changegroupsubset',
1956 {
1956 {
1957 b'bases': pullop.fetch,
1957 b'bases': pullop.fetch,
1958 b'heads': pullop.heads,
1958 b'heads': pullop.heads,
1959 b'source': b'pull',
1959 b'source': b'pull',
1960 },
1960 },
1961 ).result()
1961 ).result()
1962
1962
1963 bundleop = bundle2.applybundle(
1963 bundleop = bundle2.applybundle(
1964 pullop.repo, cg, tr, b'pull', pullop.remote.url()
1964 pullop.repo, cg, tr, b'pull', pullop.remote.url()
1965 )
1965 )
1966 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1966 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1967
1967
1968
1968
1969 def _pullphase(pullop):
1969 def _pullphase(pullop):
1970 # Get remote phases data from remote
1970 # Get remote phases data from remote
1971 if b'phases' in pullop.stepsdone:
1971 if b'phases' in pullop.stepsdone:
1972 return
1972 return
1973 remotephases = listkeys(pullop.remote, b'phases')
1973 remotephases = listkeys(pullop.remote, b'phases')
1974 _pullapplyphases(pullop, remotephases)
1974 _pullapplyphases(pullop, remotephases)
1975
1975
1976
1976
1977 def _pullapplyphases(pullop, remotephases):
1977 def _pullapplyphases(pullop, remotephases):
1978 """apply phase movement from observed remote state"""
1978 """apply phase movement from observed remote state"""
1979 if b'phases' in pullop.stepsdone:
1979 if b'phases' in pullop.stepsdone:
1980 return
1980 return
1981 pullop.stepsdone.add(b'phases')
1981 pullop.stepsdone.add(b'phases')
1982 publishing = bool(remotephases.get(b'publishing', False))
1982 publishing = bool(remotephases.get(b'publishing', False))
1983 if remotephases and not publishing:
1983 if remotephases and not publishing:
1984 # remote is new and non-publishing
1984 # remote is new and non-publishing
1985 pheads, _dr = phases.analyzeremotephases(
1985 pheads, _dr = phases.analyzeremotephases(
1986 pullop.repo, pullop.pulledsubset, remotephases
1986 pullop.repo, pullop.pulledsubset, remotephases
1987 )
1987 )
1988 dheads = pullop.pulledsubset
1988 dheads = pullop.pulledsubset
1989 else:
1989 else:
1990 # Remote is old or publishing; all common changesets
1990 # Remote is old or publishing; all common changesets
1991 # should be seen as public
1991 # should be seen as public
1992 pheads = pullop.pulledsubset
1992 pheads = pullop.pulledsubset
1993 dheads = []
1993 dheads = []
1994 unfi = pullop.repo.unfiltered()
1994 unfi = pullop.repo.unfiltered()
1995 phase = unfi._phasecache.phase
1995 phase = unfi._phasecache.phase
1996 rev = unfi.changelog.index.get_rev
1996 rev = unfi.changelog.index.get_rev
1997 public = phases.public
1997 public = phases.public
1998 draft = phases.draft
1998 draft = phases.draft
1999
1999
2000 # exclude changesets already public locally and update the others
2000 # exclude changesets already public locally and update the others
2001 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
2001 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
2002 if pheads:
2002 if pheads:
2003 tr = pullop.gettransaction()
2003 tr = pullop.gettransaction()
2004 phases.advanceboundary(pullop.repo, tr, public, pheads)
2004 phases.advanceboundary(pullop.repo, tr, public, pheads)
2005
2005
2006 # exclude changesets already draft locally and update the others
2006 # exclude changesets already draft locally and update the others
2007 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
2007 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
2008 if dheads:
2008 if dheads:
2009 tr = pullop.gettransaction()
2009 tr = pullop.gettransaction()
2010 phases.advanceboundary(pullop.repo, tr, draft, dheads)
2010 phases.advanceboundary(pullop.repo, tr, draft, dheads)
2011
2011
2012
2012
2013 def _pullbookmarks(pullop):
2013 def _pullbookmarks(pullop):
2014 """process the remote bookmark information to update the local one"""
2014 """process the remote bookmark information to update the local one"""
2015 if b'bookmarks' in pullop.stepsdone:
2015 if b'bookmarks' in pullop.stepsdone:
2016 return
2016 return
2017 pullop.stepsdone.add(b'bookmarks')
2017 pullop.stepsdone.add(b'bookmarks')
2018 repo = pullop.repo
2018 repo = pullop.repo
2019 remotebookmarks = pullop.remotebookmarks
2019 remotebookmarks = pullop.remotebookmarks
2020 bookmod.updatefromremote(
2020 bookmod.updatefromremote(
2021 repo.ui,
2021 repo.ui,
2022 repo,
2022 repo,
2023 remotebookmarks,
2023 remotebookmarks,
2024 pullop.remote.url(),
2024 pullop.remote.url(),
2025 pullop.gettransaction,
2025 pullop.gettransaction,
2026 explicit=pullop.explicitbookmarks,
2026 explicit=pullop.explicitbookmarks,
2027 )
2027 )
2028
2028
2029
2029
2030 def _pullobsolete(pullop):
2030 def _pullobsolete(pullop):
2031 """utility function to pull obsolete markers from a remote
2031 """utility function to pull obsolete markers from a remote
2032
2032
2033 The `gettransaction` argument is a function that returns the pull
2033 The `gettransaction` argument is a function that returns the pull
2034 transaction, creating one if necessary. We return the transaction to inform
2034 transaction, creating one if necessary. We return the transaction to inform
2035 the calling code that a new transaction has been created (when applicable).
2035 the calling code that a new transaction has been created (when applicable).
2036
2036
2037 Exists mostly to allow overriding for experimentation purposes"""
2037 Exists mostly to allow overriding for experimentation purposes"""
2038 if b'obsmarkers' in pullop.stepsdone:
2038 if b'obsmarkers' in pullop.stepsdone:
2039 return
2039 return
2040 pullop.stepsdone.add(b'obsmarkers')
2040 pullop.stepsdone.add(b'obsmarkers')
2041 tr = None
2041 tr = None
2042 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2042 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2043 pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
2043 pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
2044 remoteobs = listkeys(pullop.remote, b'obsolete')
2044 remoteobs = listkeys(pullop.remote, b'obsolete')
2045 if b'dump0' in remoteobs:
2045 if b'dump0' in remoteobs:
2046 tr = pullop.gettransaction()
2046 tr = pullop.gettransaction()
2047 markers = []
2047 markers = []
2048 for key in sorted(remoteobs, reverse=True):
2048 for key in sorted(remoteobs, reverse=True):
2049 if key.startswith(b'dump'):
2049 if key.startswith(b'dump'):
2050 data = util.b85decode(remoteobs[key])
2050 data = util.b85decode(remoteobs[key])
2051 version, newmarks = obsolete._readmarkers(data)
2051 version, newmarks = obsolete._readmarkers(data)
2052 markers += newmarks
2052 markers += newmarks
2053 if markers:
2053 if markers:
2054 pullop.repo.obsstore.add(tr, markers)
2054 pullop.repo.obsstore.add(tr, markers)
2055 pullop.repo.invalidatevolatilesets()
2055 pullop.repo.invalidatevolatilesets()
2056 return tr
2056 return tr
2057
2057
2058
2058
2059 def applynarrowacl(repo, kwargs):
2059 def applynarrowacl(repo, kwargs):
2060 """Apply narrow fetch access control.
2060 """Apply narrow fetch access control.
2061
2061
2062 This massages the named arguments for getbundle wire protocol commands
2062 This massages the named arguments for getbundle wire protocol commands
2063 so requested data is filtered through access control rules.
2063 so requested data is filtered through access control rules.
2064 """
2064 """
2065 ui = repo.ui
2065 ui = repo.ui
2066 # TODO this assumes existence of HTTP and is a layering violation.
2066 # TODO this assumes existence of HTTP and is a layering violation.
2067 username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
2067 username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
2068 user_includes = ui.configlist(
2068 user_includes = ui.configlist(
2069 _NARROWACL_SECTION,
2069 _NARROWACL_SECTION,
2070 username + b'.includes',
2070 username + b'.includes',
2071 ui.configlist(_NARROWACL_SECTION, b'default.includes'),
2071 ui.configlist(_NARROWACL_SECTION, b'default.includes'),
2072 )
2072 )
2073 user_excludes = ui.configlist(
2073 user_excludes = ui.configlist(
2074 _NARROWACL_SECTION,
2074 _NARROWACL_SECTION,
2075 username + b'.excludes',
2075 username + b'.excludes',
2076 ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
2076 ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
2077 )
2077 )
2078 if not user_includes:
2078 if not user_includes:
2079 raise error.Abort(
2079 raise error.Abort(
2080 _(b"%s configuration for user %s is empty")
2080 _(b"%s configuration for user %s is empty")
2081 % (_NARROWACL_SECTION, username)
2081 % (_NARROWACL_SECTION, username)
2082 )
2082 )
2083
2083
2084 user_includes = [
2084 user_includes = [
2085 b'path:.' if p == b'*' else b'path:' + p for p in user_includes
2085 b'path:.' if p == b'*' else b'path:' + p for p in user_includes
2086 ]
2086 ]
2087 user_excludes = [
2087 user_excludes = [
2088 b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
2088 b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
2089 ]
2089 ]
2090
2090
2091 req_includes = set(kwargs.get('includepats', []))
2091 req_includes = set(kwargs.get('includepats', []))
2092 req_excludes = set(kwargs.get('excludepats', []))
2092 req_excludes = set(kwargs.get('excludepats', []))
2093
2093
2094 req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
2094 req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
2095 req_includes, req_excludes, user_includes, user_excludes
2095 req_includes, req_excludes, user_includes, user_excludes
2096 )
2096 )
2097
2097
2098 if invalid_includes:
2098 if invalid_includes:
2099 raise error.Abort(
2099 raise error.Abort(
2100 _(b"The following includes are not accessible for %s: %s")
2100 _(b"The following includes are not accessible for %s: %s")
2101 % (username, stringutil.pprint(invalid_includes))
2101 % (username, stringutil.pprint(invalid_includes))
2102 )
2102 )
2103
2103
2104 new_args = {}
2104 new_args = {}
2105 new_args.update(kwargs)
2105 new_args.update(kwargs)
2106 new_args['narrow'] = True
2106 new_args['narrow'] = True
2107 new_args['narrow_acl'] = True
2107 new_args['narrow_acl'] = True
2108 new_args['includepats'] = req_includes
2108 new_args['includepats'] = req_includes
2109 if req_excludes:
2109 if req_excludes:
2110 new_args['excludepats'] = req_excludes
2110 new_args['excludepats'] = req_excludes
2111
2111
2112 return new_args
2112 return new_args
2113
2113
2114
2114
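# --- Editor's illustrative sketch (not part of this changeset) ---
# The narrowacl section read by applynarrowacl() above could look like this in
# the server configuration; user names and paths are hypothetical. A bare "*"
# is turned into b'path:.' (everything) by the code above.
#
#   [narrowacl]
#   default.includes = *
#   alice.includes = src/component1, docs
#   alice.excludes = src/component1/secrets
# --- end of sketch ---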
2115 def _computeellipsis(repo, common, heads, known, match, depth=None):
2115 def _computeellipsis(repo, common, heads, known, match, depth=None):
2116 """Compute the shape of a narrowed DAG.
2116 """Compute the shape of a narrowed DAG.
2117
2117
2118 Args:
2118 Args:
2119 repo: The repository we're transferring.
2119 repo: The repository we're transferring.
2120 common: The roots of the DAG range we're transferring.
2120 common: The roots of the DAG range we're transferring.
2121 May be just [nullid], which means all ancestors of heads.
2121 May be just [nullid], which means all ancestors of heads.
2122 heads: The heads of the DAG range we're transferring.
2122 heads: The heads of the DAG range we're transferring.
2123 match: The narrowmatcher that allows us to identify relevant changes.
2123 match: The narrowmatcher that allows us to identify relevant changes.
2124 depth: If not None, only consider nodes to be full nodes if they are at
2124 depth: If not None, only consider nodes to be full nodes if they are at
2125 most depth changesets away from one of heads.
2125 most depth changesets away from one of heads.
2126
2126
2127 Returns:
2127 Returns:
2128 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
2128 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
2129
2129
2130 visitnodes: The list of nodes (either full or ellipsis) which
2130 visitnodes: The list of nodes (either full or ellipsis) which
2131 need to be sent to the client.
2131 need to be sent to the client.
2132 relevant_nodes: The set of changelog nodes which change a file inside
2132 relevant_nodes: The set of changelog nodes which change a file inside
2133 the narrowspec. The client needs these as non-ellipsis nodes.
2133 the narrowspec. The client needs these as non-ellipsis nodes.
2134 ellipsisroots: A dict of {rev: parents} that is used in
2134 ellipsisroots: A dict of {rev: parents} that is used in
2135 narrowchangegroup to produce ellipsis nodes with the
2135 narrowchangegroup to produce ellipsis nodes with the
2136 correct parents.
2136 correct parents.
2137 """
2137 """
2138 cl = repo.changelog
2138 cl = repo.changelog
2139 mfl = repo.manifestlog
2139 mfl = repo.manifestlog
2140
2140
2141 clrev = cl.rev
2141 clrev = cl.rev
2142
2142
2143 commonrevs = {clrev(n) for n in common} | {nullrev}
2143 commonrevs = {clrev(n) for n in common} | {nullrev}
2144 headsrevs = {clrev(n) for n in heads}
2144 headsrevs = {clrev(n) for n in heads}
2145
2145
2146 if depth:
2146 if depth:
2147 revdepth = {h: 0 for h in headsrevs}
2147 revdepth = {h: 0 for h in headsrevs}
2148
2148
2149 ellipsisheads = collections.defaultdict(set)
2149 ellipsisheads = collections.defaultdict(set)
2150 ellipsisroots = collections.defaultdict(set)
2150 ellipsisroots = collections.defaultdict(set)
2151
2151
2152 def addroot(head, curchange):
2152 def addroot(head, curchange):
2153 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2153 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2154 ellipsisroots[head].add(curchange)
2154 ellipsisroots[head].add(curchange)
2155 # Recursively split ellipsis heads with 3 roots by finding the
2155 # Recursively split ellipsis heads with 3 roots by finding the
2156 # roots' youngest common descendant which is an elided merge commit.
2156 # roots' youngest common descendant which is an elided merge commit.
2157 # That descendant takes 2 of the 3 roots as its own, and becomes a
2157 # That descendant takes 2 of the 3 roots as its own, and becomes a
2158 # root of the head.
2158 # root of the head.
2159 while len(ellipsisroots[head]) > 2:
2159 while len(ellipsisroots[head]) > 2:
2160 child, roots = splithead(head)
2160 child, roots = splithead(head)
2161 splitroots(head, child, roots)
2161 splitroots(head, child, roots)
2162 head = child # Recurse in case we just added a 3rd root
2162 head = child # Recurse in case we just added a 3rd root
2163
2163
2164 def splitroots(head, child, roots):
2164 def splitroots(head, child, roots):
2165 ellipsisroots[head].difference_update(roots)
2165 ellipsisroots[head].difference_update(roots)
2166 ellipsisroots[head].add(child)
2166 ellipsisroots[head].add(child)
2167 ellipsisroots[child].update(roots)
2167 ellipsisroots[child].update(roots)
2168 ellipsisroots[child].discard(child)
2168 ellipsisroots[child].discard(child)
2169
2169
2170 def splithead(head):
2170 def splithead(head):
2171 r1, r2, r3 = sorted(ellipsisroots[head])
2171 r1, r2, r3 = sorted(ellipsisroots[head])
2172 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2172 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2173 mid = repo.revs(
2173 mid = repo.revs(
2174 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2174 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2175 )
2175 )
2176 for j in mid:
2176 for j in mid:
2177 if j == nr2:
2177 if j == nr2:
2178 return nr2, (nr1, nr2)
2178 return nr2, (nr1, nr2)
2179 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2179 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2180 return j, (nr1, nr2)
2180 return j, (nr1, nr2)
2181 raise error.Abort(
2181 raise error.Abort(
2182 _(
2182 _(
2183 b'Failed to split up ellipsis node! head: %d, '
2183 b'Failed to split up ellipsis node! head: %d, '
2184 b'roots: %d %d %d'
2184 b'roots: %d %d %d'
2185 )
2185 )
2186 % (head, r1, r2, r3)
2186 % (head, r1, r2, r3)
2187 )
2187 )
2188
2188
2189 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2189 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2190 visit = reversed(missing)
2190 visit = reversed(missing)
2191 relevant_nodes = set()
2191 relevant_nodes = set()
2192 visitnodes = [cl.node(m) for m in missing]
2192 visitnodes = [cl.node(m) for m in missing]
2193 required = set(headsrevs) | known
2193 required = set(headsrevs) | known
2194 for rev in visit:
2194 for rev in visit:
2195 clrev = cl.changelogrevision(rev)
2195 clrev = cl.changelogrevision(rev)
2196 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2196 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2197 if depth is not None:
2197 if depth is not None:
2198 curdepth = revdepth[rev]
2198 curdepth = revdepth[rev]
2199 for p in ps:
2199 for p in ps:
2200 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2200 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2201 needed = False
2201 needed = False
2202 shallow_enough = depth is None or revdepth[rev] <= depth
2202 shallow_enough = depth is None or revdepth[rev] <= depth
2203 if shallow_enough:
2203 if shallow_enough:
2204 curmf = mfl[clrev.manifest].read()
2204 curmf = mfl[clrev.manifest].read()
2205 if ps:
2205 if ps:
2206 # We choose to not trust the changed files list in
2206 # We choose to not trust the changed files list in
2207 # changesets because it's not always correct. TODO: could
2207 # changesets because it's not always correct. TODO: could
2208 # we trust it for the non-merge case?
2208 # we trust it for the non-merge case?
2209 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2209 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2210 needed = bool(curmf.diff(p1mf, match))
2210 needed = bool(curmf.diff(p1mf, match))
2211 if not needed and len(ps) > 1:
2211 if not needed and len(ps) > 1:
2212 # For merge changes, the list of changed files is not
2212 # For merge changes, the list of changed files is not
2213 # helpful, since we need to emit the merge if a file
2213 # helpful, since we need to emit the merge if a file
2214 # in the narrow spec has changed on either side of the
2214 # in the narrow spec has changed on either side of the
2215 # merge. As a result, we do a manifest diff to check.
2215 # merge. As a result, we do a manifest diff to check.
2216 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2216 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2217 needed = bool(curmf.diff(p2mf, match))
2217 needed = bool(curmf.diff(p2mf, match))
2218 else:
2218 else:
2219 # For a root node, we need to include the node if any
2219 # For a root node, we need to include the node if any
2220 # files in the node match the narrowspec.
2220 # files in the node match the narrowspec.
2221 needed = any(curmf.walk(match))
2221 needed = any(curmf.walk(match))
2222
2222
2223 if needed:
2223 if needed:
2224 for head in ellipsisheads[rev]:
2224 for head in ellipsisheads[rev]:
2225 addroot(head, rev)
2225 addroot(head, rev)
2226 for p in ps:
2226 for p in ps:
2227 required.add(p)
2227 required.add(p)
2228 relevant_nodes.add(cl.node(rev))
2228 relevant_nodes.add(cl.node(rev))
2229 else:
2229 else:
2230 if not ps:
2230 if not ps:
2231 ps = [nullrev]
2231 ps = [nullrev]
2232 if rev in required:
2232 if rev in required:
2233 for head in ellipsisheads[rev]:
2233 for head in ellipsisheads[rev]:
2234 addroot(head, rev)
2234 addroot(head, rev)
2235 for p in ps:
2235 for p in ps:
2236 ellipsisheads[p].add(rev)
2236 ellipsisheads[p].add(rev)
2237 else:
2237 else:
2238 for p in ps:
2238 for p in ps:
2239 ellipsisheads[p] |= ellipsisheads[rev]
2239 ellipsisheads[p] |= ellipsisheads[rev]
2240
2240
2241 # add common changesets as roots of their reachable ellipsis heads
2241 # add common changesets as roots of their reachable ellipsis heads
2242 for c in commonrevs:
2242 for c in commonrevs:
2243 for head in ellipsisheads[c]:
2243 for head in ellipsisheads[c]:
2244 addroot(head, c)
2244 addroot(head, c)
2245 return visitnodes, relevant_nodes, ellipsisroots
2245 return visitnodes, relevant_nodes, ellipsisroots
2246
2246
2247
2247
2248 def caps20to10(repo, role):
2248 def caps20to10(repo, role):
2249 """return a set with appropriate options to use bundle20 during getbundle"""
2249 """return a set with appropriate options to use bundle20 during getbundle"""
2250 caps = {b'HG20'}
2250 caps = {b'HG20'}
2251 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
2251 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
2252 caps.add(b'bundle2=' + urlreq.quote(capsblob))
2252 caps.add(b'bundle2=' + urlreq.quote(capsblob))
2253 return caps
2253 return caps
2254
2254
2255
2255
2256 # List of names of steps to perform for a bundle2 for getbundle, order matters.
2256 # List of names of steps to perform for a bundle2 for getbundle, order matters.
2257 getbundle2partsorder = []
2257 getbundle2partsorder = []
2258
2258
2259 # Mapping between step name and function
2259 # Mapping between step name and function
2260 #
2260 #
2261 # This exists to help extensions wrap steps if necessary
2261 # This exists to help extensions wrap steps if necessary
2262 getbundle2partsmapping = {}
2262 getbundle2partsmapping = {}
2263
2263
2264
2264
2265 def getbundle2partsgenerator(stepname, idx=None):
2265 def getbundle2partsgenerator(stepname, idx=None):
2266 """decorator for function generating bundle2 part for getbundle
2266 """decorator for function generating bundle2 part for getbundle
2267
2267
2268 The function is added to the step -> function mapping and appended to the
2268 The function is added to the step -> function mapping and appended to the
2269 list of steps. Beware that decorated functions will be added in order
2269 list of steps. Beware that decorated functions will be added in order
2270 (this may matter).
2270 (this may matter).
2271
2271
2272 You can only use this decorator for new steps; if you want to wrap a step
2272 You can only use this decorator for new steps; if you want to wrap a step
2273 from an extension, modify the getbundle2partsmapping dictionary directly."""
2273 from an extension, modify the getbundle2partsmapping dictionary directly."""
2274
2274
2275 def dec(func):
2275 def dec(func):
2276 assert stepname not in getbundle2partsmapping
2276 assert stepname not in getbundle2partsmapping
2277 getbundle2partsmapping[stepname] = func
2277 getbundle2partsmapping[stepname] = func
2278 if idx is None:
2278 if idx is None:
2279 getbundle2partsorder.append(stepname)
2279 getbundle2partsorder.append(stepname)
2280 else:
2280 else:
2281 getbundle2partsorder.insert(idx, stepname)
2281 getbundle2partsorder.insert(idx, stepname)
2282 return func
2282 return func
2283
2283
2284 return dec
2284 return dec
2285
2285
2286
2286
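# --- Editor's illustrative sketch (not part of this changeset) ---
# Registering a new getbundle part generator from an extension with the
# decorator defined above; the part name b'myext:example' is hypothetical and
# the signature mirrors the generators defined below.
from mercurial import exchange

@exchange.getbundle2partsgenerator(b'myext:example')
def _getbundleexamplepart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    # only emit the (hypothetical) part when the client advertises support
    if b2caps and b'myext:example' in b2caps:
        bundler.newpart(b'myext:example', data=b'hello', mandatory=False)

# Wrapping an existing step, as the docstring above suggests, means replacing
# its entry in getbundle2partsmapping directly:
_origcg = exchange.getbundle2partsmapping[b'changegroup']

def _wrappedcgpart(bundler, repo, source, **kwargs):
    # hypothetical pre/post hooks would go around this call
    return _origcg(bundler, repo, source, **kwargs)

exchange.getbundle2partsmapping[b'changegroup'] = _wrappedcgpart
# --- end of sketch ---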
2287 def bundle2requested(bundlecaps):
2287 def bundle2requested(bundlecaps):
2288 if bundlecaps is not None:
2288 if bundlecaps is not None:
2289 return any(cap.startswith(b'HG2') for cap in bundlecaps)
2289 return any(cap.startswith(b'HG2') for cap in bundlecaps)
2290 return False
2290 return False
2291
2291
2292
2292
2293 def getbundlechunks(
2293 def getbundlechunks(
2294 repo,
2294 repo,
2295 source,
2295 source,
2296 heads=None,
2296 heads=None,
2297 common=None,
2297 common=None,
2298 bundlecaps=None,
2298 bundlecaps=None,
2299 remote_sidedata=None,
2299 remote_sidedata=None,
2300 **kwargs
2300 **kwargs
2301 ):
2301 ):
2302 """Return chunks constituting a bundle's raw data.
2302 """Return chunks constituting a bundle's raw data.
2303
2303
2304 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2304 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2305 passed.
2305 passed.
2306
2306
2307 Returns a 2-tuple of a dict with metadata about the generated bundle
2307 Returns a 2-tuple of a dict with metadata about the generated bundle
2308 and an iterator over raw chunks (of varying sizes).
2308 and an iterator over raw chunks (of varying sizes).
2309 """
2309 """
2310 kwargs = pycompat.byteskwargs(kwargs)
2310 kwargs = pycompat.byteskwargs(kwargs)
2311 info = {}
2311 info = {}
2312 usebundle2 = bundle2requested(bundlecaps)
2312 usebundle2 = bundle2requested(bundlecaps)
2313 # bundle10 case
2313 # bundle10 case
2314 if not usebundle2:
2314 if not usebundle2:
2315 if bundlecaps and not kwargs.get(b'cg', True):
2315 if bundlecaps and not kwargs.get(b'cg', True):
2316 raise ValueError(
2316 raise ValueError(
2317 _(b'request for bundle10 must include changegroup')
2317 _(b'request for bundle10 must include changegroup')
2318 )
2318 )
2319
2319
2320 if kwargs:
2320 if kwargs:
2321 raise ValueError(
2321 raise ValueError(
2322 _(b'unsupported getbundle arguments: %s')
2322 _(b'unsupported getbundle arguments: %s')
2323 % b', '.join(sorted(kwargs.keys()))
2323 % b', '.join(sorted(kwargs.keys()))
2324 )
2324 )
2325 outgoing = _computeoutgoing(repo, heads, common)
2325 outgoing = _computeoutgoing(repo, heads, common)
2326 info[b'bundleversion'] = 1
2326 info[b'bundleversion'] = 1
2327 return (
2327 return (
2328 info,
2328 info,
2329 changegroup.makestream(
2329 changegroup.makestream(
2330 repo,
2330 repo,
2331 outgoing,
2331 outgoing,
2332 b'01',
2332 b'01',
2333 source,
2333 source,
2334 bundlecaps=bundlecaps,
2334 bundlecaps=bundlecaps,
2335 remote_sidedata=remote_sidedata,
2335 remote_sidedata=remote_sidedata,
2336 ),
2336 ),
2337 )
2337 )
2338
2338
2339 # bundle20 case
2339 # bundle20 case
2340 info[b'bundleversion'] = 2
2340 info[b'bundleversion'] = 2
2341 b2caps = {}
2341 b2caps = {}
2342 for bcaps in bundlecaps:
2342 for bcaps in bundlecaps:
2343 if bcaps.startswith(b'bundle2='):
2343 if bcaps.startswith(b'bundle2='):
2344 blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
2344 blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
2345 b2caps.update(bundle2.decodecaps(blob))
2345 b2caps.update(bundle2.decodecaps(blob))
2346 bundler = bundle2.bundle20(repo.ui, b2caps)
2346 bundler = bundle2.bundle20(repo.ui, b2caps)
2347
2347
2348 kwargs[b'heads'] = heads
2348 kwargs[b'heads'] = heads
2349 kwargs[b'common'] = common
2349 kwargs[b'common'] = common
2350
2350
2351 for name in getbundle2partsorder:
2351 for name in getbundle2partsorder:
2352 func = getbundle2partsmapping[name]
2352 func = getbundle2partsmapping[name]
2353 func(
2353 func(
2354 bundler,
2354 bundler,
2355 repo,
2355 repo,
2356 source,
2356 source,
2357 bundlecaps=bundlecaps,
2357 bundlecaps=bundlecaps,
2358 b2caps=b2caps,
2358 b2caps=b2caps,
2359 remote_sidedata=remote_sidedata,
2359 remote_sidedata=remote_sidedata,
2360 **pycompat.strkwargs(kwargs)
2360 **pycompat.strkwargs(kwargs)
2361 )
2361 )
2362
2362
2363 info[b'prefercompressed'] = bundler.prefercompressed
2363 info[b'prefercompressed'] = bundler.prefercompressed
2364
2364
2365 return info, bundler.getchunks()
2365 return info, bundler.getchunks()
2366
2366
2367
2367
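# --- Editor's illustrative sketch (not part of this changeset) ---
# Consuming the (info, chunks) pair returned by getbundlechunks() above; the
# helper name, the b'pull' source and the output handling are made up.
from mercurial import exchange
from mercurial.node import nullid

def write_raw_bundle(repo, path):
    # advertise bundle2 support the same way the pull code earlier in this
    # module does, so the HG20 code path is taken
    bundlecaps = exchange.caps20to10(repo, role=b'client')
    info, chunks = exchange.getbundlechunks(
        repo,
        b'pull',
        heads=repo.heads(),
        common=[nullid],
        bundlecaps=bundlecaps,
    )
    with open(path, 'wb') as fh:
        for chunk in chunks:
            fh.write(chunk)
    return info  # e.g. info[b'bundleversion'], info[b'prefercompressed']
# --- end of sketch ---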
2368 @getbundle2partsgenerator(b'stream2')
2368 @getbundle2partsgenerator(b'stream2')
2369 def _getbundlestream2(bundler, repo, *args, **kwargs):
2369 def _getbundlestream2(bundler, repo, *args, **kwargs):
2370 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2370 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2371
2371
2372
2372
2373 @getbundle2partsgenerator(b'changegroup')
2373 @getbundle2partsgenerator(b'changegroup')
2374 def _getbundlechangegrouppart(
2374 def _getbundlechangegrouppart(
2375 bundler,
2375 bundler,
2376 repo,
2376 repo,
2377 source,
2377 source,
2378 bundlecaps=None,
2378 bundlecaps=None,
2379 b2caps=None,
2379 b2caps=None,
2380 heads=None,
2380 heads=None,
2381 common=None,
2381 common=None,
2382 remote_sidedata=None,
2382 remote_sidedata=None,
2383 **kwargs
2383 **kwargs
2384 ):
2384 ):
2385 """add a changegroup part to the requested bundle"""
2385 """add a changegroup part to the requested bundle"""
2386 if not kwargs.get('cg', True) or not b2caps:
2386 if not kwargs.get('cg', True) or not b2caps:
2387 return
2387 return
2388
2388
2389 version = b'01'
2389 version = b'01'
2390 cgversions = b2caps.get(b'changegroup')
2390 cgversions = b2caps.get(b'changegroup')
2391 if cgversions: # 3.1 and 3.2 ship with an empty value
2391 if cgversions: # 3.1 and 3.2 ship with an empty value
2392 cgversions = [
2392 cgversions = [
2393 v
2393 v
2394 for v in cgversions
2394 for v in cgversions
2395 if v in changegroup.supportedoutgoingversions(repo)
2395 if v in changegroup.supportedoutgoingversions(repo)
2396 ]
2396 ]
2397 if not cgversions:
2397 if not cgversions:
2398 raise error.Abort(_(b'no common changegroup version'))
2398 raise error.Abort(_(b'no common changegroup version'))
2399 version = max(cgversions)
2399 version = max(cgversions)
2400
2400
2401 outgoing = _computeoutgoing(repo, heads, common)
2401 outgoing = _computeoutgoing(repo, heads, common)
2402 if not outgoing.missing:
2402 if not outgoing.missing:
2403 return
2403 return
2404
2404
2405 if kwargs.get('narrow', False):
2405 if kwargs.get('narrow', False):
2406 include = sorted(filter(bool, kwargs.get('includepats', [])))
2406 include = sorted(filter(bool, kwargs.get('includepats', [])))
2407 exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
2407 exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
2408 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2408 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2409 else:
2409 else:
2410 matcher = None
2410 matcher = None
2411
2411
2412 cgstream = changegroup.makestream(
2412 cgstream = changegroup.makestream(
2413 repo,
2413 repo,
2414 outgoing,
2414 outgoing,
2415 version,
2415 version,
2416 source,
2416 source,
2417 bundlecaps=bundlecaps,
2417 bundlecaps=bundlecaps,
2418 matcher=matcher,
2418 matcher=matcher,
2419 remote_sidedata=remote_sidedata,
2419 remote_sidedata=remote_sidedata,
2420 )
2420 )
2421
2421
2422 part = bundler.newpart(b'changegroup', data=cgstream)
2422 part = bundler.newpart(b'changegroup', data=cgstream)
2423 if cgversions:
2423 if cgversions:
2424 part.addparam(b'version', version)
2424 part.addparam(b'version', version)
2425
2425
2426 part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
2426 part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
2427
2427
2428 if scmutil.istreemanifest(repo):
2428 if scmutil.istreemanifest(repo):
2429 part.addparam(b'treemanifest', b'1')
2429 part.addparam(b'treemanifest', b'1')
2430
2430
2431 if b'exp-sidedata-flag' in repo.requirements:
2431 if b'exp-sidedata-flag' in repo.requirements:
2432 part.addparam(b'exp-sidedata', b'1')
2432 part.addparam(b'exp-sidedata', b'1')
2433 sidedata = bundle2.format_remote_wanted_sidedata(repo)
2433 sidedata = bundle2.format_remote_wanted_sidedata(repo)
2434 part.addparam(b'exp-wanted-sidedata', sidedata)
2434 part.addparam(b'exp-wanted-sidedata', sidedata)
2435
2435
2436 if (
2436 if (
2437 kwargs.get('narrow', False)
2437 kwargs.get('narrow', False)
2438 and kwargs.get('narrow_acl', False)
2438 and kwargs.get('narrow_acl', False)
2439 and (include or exclude)
2439 and (include or exclude)
2440 ):
2440 ):
2441 # this is mandatory because otherwise ACL clients won't work
2441 # this is mandatory because otherwise ACL clients won't work
2442 narrowspecpart = bundler.newpart(b'Narrow:responsespec')
2442 narrowspecpart = bundler.newpart(b'Narrow:responsespec')
2443 narrowspecpart.data = b'%s\0%s' % (
2443 narrowspecpart.data = b'%s\0%s' % (
2444 b'\n'.join(include),
2444 b'\n'.join(include),
2445 b'\n'.join(exclude),
2445 b'\n'.join(exclude),
2446 )
2446 )
2447
2447
2448
2448
2449 @getbundle2partsgenerator(b'bookmarks')
2449 @getbundle2partsgenerator(b'bookmarks')
2450 def _getbundlebookmarkpart(
2450 def _getbundlebookmarkpart(
2451 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2451 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2452 ):
2452 ):
2453 """add a bookmark part to the requested bundle"""
2453 """add a bookmark part to the requested bundle"""
2454 if not kwargs.get('bookmarks', False):
2454 if not kwargs.get('bookmarks', False):
2455 return
2455 return
2456 if not b2caps or b'bookmarks' not in b2caps:
2456 if not b2caps or b'bookmarks' not in b2caps:
2457 raise error.Abort(_(b'no common bookmarks exchange method'))
2457 raise error.Abort(_(b'no common bookmarks exchange method'))
2458 books = bookmod.listbinbookmarks(repo)
2458 books = bookmod.listbinbookmarks(repo)
2459 data = bookmod.binaryencode(repo, books)
2459 data = bookmod.binaryencode(repo, books)
2460 if data:
2460 if data:
2461 bundler.newpart(b'bookmarks', data=data)
2461 bundler.newpart(b'bookmarks', data=data)
2462
2462
2463
2463
2464 @getbundle2partsgenerator(b'listkeys')
2464 @getbundle2partsgenerator(b'listkeys')
2465 def _getbundlelistkeysparts(
2465 def _getbundlelistkeysparts(
2466 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2466 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2467 ):
2467 ):
2468 """add parts containing listkeys namespaces to the requested bundle"""
2468 """add parts containing listkeys namespaces to the requested bundle"""
2469 listkeys = kwargs.get('listkeys', ())
2469 listkeys = kwargs.get('listkeys', ())
2470 for namespace in listkeys:
2470 for namespace in listkeys:
2471 part = bundler.newpart(b'listkeys')
2471 part = bundler.newpart(b'listkeys')
2472 part.addparam(b'namespace', namespace)
2472 part.addparam(b'namespace', namespace)
2473 keys = repo.listkeys(namespace).items()
2473 keys = repo.listkeys(namespace).items()
2474 part.data = pushkey.encodekeys(keys)
2474 part.data = pushkey.encodekeys(keys)
2475
2475
2476
2476
2477 @getbundle2partsgenerator(b'obsmarkers')
2477 @getbundle2partsgenerator(b'obsmarkers')
2478 def _getbundleobsmarkerpart(
2478 def _getbundleobsmarkerpart(
2479 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2479 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2480 ):
2480 ):
2481 """add an obsolescence markers part to the requested bundle"""
2481 """add an obsolescence markers part to the requested bundle"""
2482 if kwargs.get('obsmarkers', False):
2482 if kwargs.get('obsmarkers', False):
2483 if heads is None:
2483 if heads is None:
2484 heads = repo.heads()
2484 heads = repo.heads()
2485 subset = [c.node() for c in repo.set(b'::%ln', heads)]
2485 subset = [c.node() for c in repo.set(b'::%ln', heads)]
2486 markers = repo.obsstore.relevantmarkers(subset)
2486 markers = repo.obsstore.relevantmarkers(subset)
2487 markers = obsutil.sortedmarkers(markers)
2487 markers = obsutil.sortedmarkers(markers)
2488 bundle2.buildobsmarkerspart(bundler, markers)
2488 bundle2.buildobsmarkerspart(bundler, markers)
2489
2489
2490
2490
2491 @getbundle2partsgenerator(b'phases')
2491 @getbundle2partsgenerator(b'phases')
2492 def _getbundlephasespart(
2492 def _getbundlephasespart(
2493 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2493 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2494 ):
2494 ):
2495 """add phase heads part to the requested bundle"""
2495 """add phase heads part to the requested bundle"""
2496 if kwargs.get('phases', False):
2496 if kwargs.get('phases', False):
2497 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2497 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2498 raise error.Abort(_(b'no common phases exchange method'))
2498 raise error.Abort(_(b'no common phases exchange method'))
2499 if heads is None:
2499 if heads is None:
2500 heads = repo.heads()
2500 heads = repo.heads()
2501
2501
2502 headsbyphase = collections.defaultdict(set)
2502 headsbyphase = collections.defaultdict(set)
2503 if repo.publishing():
2503 if repo.publishing():
2504 headsbyphase[phases.public] = heads
2504 headsbyphase[phases.public] = heads
2505 else:
2505 else:
2506 # find the appropriate heads to move
2506 # find the appropriate heads to move
2507
2507
2508 phase = repo._phasecache.phase
2508 phase = repo._phasecache.phase
2509 node = repo.changelog.node
2509 node = repo.changelog.node
2510 rev = repo.changelog.rev
2510 rev = repo.changelog.rev
2511 for h in heads:
2511 for h in heads:
2512 headsbyphase[phase(repo, rev(h))].add(h)
2512 headsbyphase[phase(repo, rev(h))].add(h)
2513 seenphases = list(headsbyphase.keys())
2513 seenphases = list(headsbyphase.keys())
2514
2514
2515 # We do not handle anything but public and draft phases for now
2515 # We do not handle anything but public and draft phases for now
2516 if seenphases:
2516 if seenphases:
2517 assert max(seenphases) <= phases.draft
2517 assert max(seenphases) <= phases.draft
2518
2518
2519 # if client is pulling non-public changesets, we need to find
2519 # if client is pulling non-public changesets, we need to find
2520 # intermediate public heads.
2520 # intermediate public heads.
2521 draftheads = headsbyphase.get(phases.draft, set())
2521 draftheads = headsbyphase.get(phases.draft, set())
2522 if draftheads:
2522 if draftheads:
2523 publicheads = headsbyphase.get(phases.public, set())
2523 publicheads = headsbyphase.get(phases.public, set())
2524
2524
2525 revset = b'heads(only(%ln, %ln) and public())'
2525 revset = b'heads(only(%ln, %ln) and public())'
2526 extraheads = repo.revs(revset, draftheads, publicheads)
2526 extraheads = repo.revs(revset, draftheads, publicheads)
2527 for r in extraheads:
2527 for r in extraheads:
2528 headsbyphase[phases.public].add(node(r))
2528 headsbyphase[phases.public].add(node(r))
2529
2529
2530 # transform data in a format used by the encoding function
2530 # transform data in a format used by the encoding function
2531 phasemapping = {
2531 phasemapping = {
2532 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2532 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2533 }
2533 }
2534
2534
2535 # generate the actual part
2535 # generate the actual part
2536 phasedata = phases.binaryencode(phasemapping)
2536 phasedata = phases.binaryencode(phasemapping)
2537 bundler.newpart(b'phase-heads', data=phasedata)
2537 bundler.newpart(b'phase-heads', data=phasedata)
2538
2538
2539
2539
2540 @getbundle2partsgenerator(b'hgtagsfnodes')
2540 @getbundle2partsgenerator(b'hgtagsfnodes')
2541 def _getbundletagsfnodes(
2541 def _getbundletagsfnodes(
2542 bundler,
2542 bundler,
2543 repo,
2543 repo,
2544 source,
2544 source,
2545 bundlecaps=None,
2545 bundlecaps=None,
2546 b2caps=None,
2546 b2caps=None,
2547 heads=None,
2547 heads=None,
2548 common=None,
2548 common=None,
2549 **kwargs
2549 **kwargs
2550 ):
2550 ):
2551 """Transfer the .hgtags filenodes mapping.
2551 """Transfer the .hgtags filenodes mapping.
2552
2552
2553 Only values for heads in this bundle will be transferred.
2553 Only values for heads in this bundle will be transferred.
2554
2554
2555 The part data consists of pairs of 20-byte changeset nodes and .hgtags
2555 The part data consists of pairs of 20-byte changeset nodes and .hgtags
2556 filenode raw values.
2556 filenode raw values.
2557 """
2557 """
2558 # Don't send unless:
2558 # Don't send unless:
2559 # - changesets are being exchanged,
2559 # - changesets are being exchanged,
2560 # - the client supports it.
2560 # - the client supports it.
2561 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2561 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2562 return
2562 return
2563
2563
2564 outgoing = _computeoutgoing(repo, heads, common)
2564 outgoing = _computeoutgoing(repo, heads, common)
2565 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2565 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2566
2566
2567
2567
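# --- Editor's illustrative sketch (not part of this changeset) ---
# Decoding the fixed-width records described in the docstring above: each
# record is assumed to be a 20-byte changeset node followed by the 20-byte
# .hgtags filenode. The helper name is made up.
def iter_tagsfnodes(data):
    assert len(data) % 40 == 0
    for offset in range(0, len(data), 40):
        cs_node = data[offset:offset + 20]
        tags_filenode = data[offset + 20:offset + 40]
        yield cs_node, tags_filenode
# --- end of sketch ---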
2568 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2568 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2569 def _getbundlerevbranchcache(
2569 def _getbundlerevbranchcache(
2570 bundler,
2570 bundler,
2571 repo,
2571 repo,
2572 source,
2572 source,
2573 bundlecaps=None,
2573 bundlecaps=None,
2574 b2caps=None,
2574 b2caps=None,
2575 heads=None,
2575 heads=None,
2576 common=None,
2576 common=None,
2577 **kwargs
2577 **kwargs
2578 ):
2578 ):
2579 """Transfer the rev-branch-cache mapping
2579 """Transfer the rev-branch-cache mapping
2580
2580
2581 The payload is a series of data related to each branch
2581 The payload is a series of data related to each branch
2582
2582
2583 1) branch name length
2583 1) branch name length
2584 2) number of open heads
2584 2) number of open heads
2585 3) number of closed heads
2585 3) number of closed heads
2586 4) open heads nodes
2586 4) open heads nodes
2587 5) closed heads nodes
2587 5) closed heads nodes
2588 """
2588 """
2589 # Don't send unless:
2589 # Don't send unless:
2590 # - changesets are being exchanged,
2590 # - changesets are being exchanged,
2591 # - the client supports it.
2591 # - the client supports it.
2592 # - narrow bundle isn't in play (not currently compatible).
2592 # - narrow bundle isn't in play (not currently compatible).
2593 if (
2593 if (
2594 not kwargs.get('cg', True)
2594 not kwargs.get('cg', True)
2595 or not b2caps
2595 or not b2caps
2596 or b'rev-branch-cache' not in b2caps
2596 or b'rev-branch-cache' not in b2caps
2597 or kwargs.get('narrow', False)
2597 or kwargs.get('narrow', False)
2598 or repo.ui.has_section(_NARROWACL_SECTION)
2598 or repo.ui.has_section(_NARROWACL_SECTION)
2599 ):
2599 ):
2600 return
2600 return
2601
2601
2602 outgoing = _computeoutgoing(repo, heads, common)
2602 outgoing = _computeoutgoing(repo, heads, common)
2603 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2603 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2604
2604
2605
2605
2606 def check_heads(repo, their_heads, context):
2606 def check_heads(repo, their_heads, context):
2607 """check if the heads of a repo have been modified
2607 """check if the heads of a repo have been modified
2608
2608
2609 Used by peer for unbundling.
2609 Used by peer for unbundling.
2610 """
2610 """
2611 heads = repo.heads()
2611 heads = repo.heads()
2612 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2612 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2613 if not (
2613 if not (
2614 their_heads == [b'force']
2614 their_heads == [b'force']
2615 or their_heads == heads
2615 or their_heads == heads
2616 or their_heads == [b'hashed', heads_hash]
2616 or their_heads == [b'hashed', heads_hash]
2617 ):
2617 ):
2618 # someone else committed/pushed/unbundled while we
2618 # someone else committed/pushed/unbundled while we
2619 # were transferring data
2619 # were transferring data
2620 raise error.PushRaced(
2620 raise error.PushRaced(
2621 b'repository changed while %s - please try again' % context
2621 b'repository changed while %s - please try again' % context
2622 )
2622 )
2623
2623
2624
2624
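# --- Editor's illustrative sketch (not part of this changeset) ---
# Building the b'hashed' form of `their_heads` that check_heads() compares
# against above; hashlib is used here purely for illustration in place of the
# hashutil.sha1 wrapper, and the helper name is made up.
import hashlib

def hashed_heads_arg(observed_heads):
    digest = hashlib.sha1(b''.join(sorted(observed_heads))).digest()
    return [b'hashed', digest]
# --- end of sketch ---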
2625 def unbundle(repo, cg, heads, source, url):
2625 def unbundle(repo, cg, heads, source, url):
2626 """Apply a bundle to a repo.
2626 """Apply a bundle to a repo.
2627
2627
2628 This function makes sure the repo is locked during the application and has
2628 This function makes sure the repo is locked during the application and has
2629 a mechanism to check that no push race occurred between the creation of the
2629 a mechanism to check that no push race occurred between the creation of the
2630 bundle and its application.
2630 bundle and its application.
2631
2631
2632 If the push was raced, a PushRaced exception is raised."""
2632 If the push was raced, a PushRaced exception is raised."""
2633 r = 0
2633 r = 0
2634 # need a transaction when processing a bundle2 stream
2634 # need a transaction when processing a bundle2 stream
2635 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2635 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2636 lockandtr = [None, None, None]
2636 lockandtr = [None, None, None]
2637 recordout = None
2637 recordout = None
2638 # quick fix for output mismatch with bundle2 in 3.4
2638 # quick fix for output mismatch with bundle2 in 3.4
2639 captureoutput = repo.ui.configbool(
2639 captureoutput = repo.ui.configbool(
2640 b'experimental', b'bundle2-output-capture'
2640 b'experimental', b'bundle2-output-capture'
2641 )
2641 )
2642 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2642 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2643 captureoutput = True
2643 captureoutput = True
2644 try:
2644 try:
2645 # note: outside bundle1, 'heads' is expected to be empty and this
2645 # note: outside bundle1, 'heads' is expected to be empty and this
2646 # 'check_heads' call will be a no-op
2646 # 'check_heads' call will be a no-op
2647 check_heads(repo, heads, b'uploading changes')
2647 check_heads(repo, heads, b'uploading changes')
2648 # push can proceed
2648 # push can proceed
2649 if not isinstance(cg, bundle2.unbundle20):
2649 if not isinstance(cg, bundle2.unbundle20):
2650 # legacy case: bundle1 (changegroup 01)
2650 # legacy case: bundle1 (changegroup 01)
2651 txnname = b"\n".join([source, urlutil.hidepassword(url)])
2651 txnname = b"\n".join([source, urlutil.hidepassword(url)])
2652 with repo.lock(), repo.transaction(txnname) as tr:
2652 with repo.lock(), repo.transaction(txnname) as tr:
2653 op = bundle2.applybundle(repo, cg, tr, source, url)
2653 op = bundle2.applybundle(repo, cg, tr, source, url)
2654 r = bundle2.combinechangegroupresults(op)
2654 r = bundle2.combinechangegroupresults(op)
2655 else:
2655 else:
2656 r = None
2656 r = None
2657 try:
2657 try:
2658
2658
2659 def gettransaction():
2659 def gettransaction():
2660 if not lockandtr[2]:
2660 if not lockandtr[2]:
2661 if not bookmod.bookmarksinstore(repo):
2661 if not bookmod.bookmarksinstore(repo):
2662 lockandtr[0] = repo.wlock()
2662 lockandtr[0] = repo.wlock()
2663 lockandtr[1] = repo.lock()
2663 lockandtr[1] = repo.lock()
2664 lockandtr[2] = repo.transaction(source)
2664 lockandtr[2] = repo.transaction(source)
2665 lockandtr[2].hookargs[b'source'] = source
2665 lockandtr[2].hookargs[b'source'] = source
2666 lockandtr[2].hookargs[b'url'] = url
2666 lockandtr[2].hookargs[b'url'] = url
2667 lockandtr[2].hookargs[b'bundle2'] = b'1'
2667 lockandtr[2].hookargs[b'bundle2'] = b'1'
2668 return lockandtr[2]
2668 return lockandtr[2]
2669
2669
2670 # Do greedy locking by default until we're satisfied with lazy
2670 # Do greedy locking by default until we're satisfied with lazy
2671 # locking.
2671 # locking.
2672 if not repo.ui.configbool(
2672 if not repo.ui.configbool(
2673 b'experimental', b'bundle2lazylocking'
2673 b'experimental', b'bundle2lazylocking'
2674 ):
2674 ):
2675 gettransaction()
2675 gettransaction()
2676
2676
2677 op = bundle2.bundleoperation(
2677 op = bundle2.bundleoperation(
2678 repo,
2678 repo,
2679 gettransaction,
2679 gettransaction,
2680 captureoutput=captureoutput,
2680 captureoutput=captureoutput,
2681 source=b'push',
2681 source=b'push',
2682 )
2682 )
2683 try:
2683 try:
2684 op = bundle2.processbundle(repo, cg, op=op)
2684 op = bundle2.processbundle(repo, cg, op=op)
2685 finally:
2685 finally:
2686 r = op.reply
2686 r = op.reply
2687 if captureoutput and r is not None:
2687 if captureoutput and r is not None:
2688 repo.ui.pushbuffer(error=True, subproc=True)
2688 repo.ui.pushbuffer(error=True, subproc=True)
2689
2689
2690 def recordout(output):
2690 def recordout(output):
2691 r.newpart(b'output', data=output, mandatory=False)
2691 r.newpart(b'output', data=output, mandatory=False)
2692
2692
2693 if lockandtr[2] is not None:
2693 if lockandtr[2] is not None:
2694 lockandtr[2].close()
2694 lockandtr[2].close()
2695 except BaseException as exc:
2695 except BaseException as exc:
2696 exc.duringunbundle2 = True
2696 exc.duringunbundle2 = True
2697 if captureoutput and r is not None:
2697 if captureoutput and r is not None:
2698 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2698 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2699
2699
2700 def recordout(output):
2700 def recordout(output):
2701 part = bundle2.bundlepart(
2701 part = bundle2.bundlepart(
2702 b'output', data=output, mandatory=False
2702 b'output', data=output, mandatory=False
2703 )
2703 )
2704 parts.append(part)
2704 parts.append(part)
2705
2705
2706 raise
2706 raise
2707 finally:
2707 finally:
2708 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2708 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2709 if recordout is not None:
2709 if recordout is not None:
2710 recordout(repo.ui.popbuffer())
2710 recordout(repo.ui.popbuffer())
2711 return r
2711 return r
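A standalone sketch of the lazy lock/transaction pattern used in the bundle2 branch above: resources are acquired only on first use and recorded so the cleanup path can release exactly what was taken, in reverse order. The repo methods assumed here (wlock, lock, transaction, and a release method on each handle) simply mirror the calls shown above; this is illustrative, not Mercurial's API.

class LazyTransaction(object):
    """Illustrative sketch only, not part of exchange.py."""

    def __init__(self, repo, name):
        self._repo = repo
        self._name = name
        # [wlock, lock, tr], filled in lazily like the lockandtr list above
        self._state = [None, None, None]

    def get(self):
        # Acquire the locks and open the transaction on first use only.
        if self._state[2] is None:
            self._state[0] = self._repo.wlock()
            self._state[1] = self._repo.lock()
            self._state[2] = self._repo.transaction(self._name)
        return self._state[2]

    def release(self):
        # Release in reverse acquisition order: transaction, lock, wlock.
        for handle in reversed(self._state):
            if handle is not None:
                handle.release()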
2712
2712
2713
2713
2714 def _maybeapplyclonebundle(pullop):
2714 def _maybeapplyclonebundle(pullop):
2715 """Apply a clone bundle from a remote, if possible."""
2715 """Apply a clone bundle from a remote, if possible."""
2716
2716
2717 repo = pullop.repo
2717 repo = pullop.repo
2718 remote = pullop.remote
2718 remote = pullop.remote
2719
2719
2720 if not repo.ui.configbool(b'ui', b'clonebundles'):
2720 if not repo.ui.configbool(b'ui', b'clonebundles'):
2721 return
2721 return
2722
2722
2723 # Only run if local repo is empty.
2723 # Only run if local repo is empty.
2724 if len(repo):
2724 if len(repo):
2725 return
2725 return
2726
2726
2727 if pullop.heads:
2727 if pullop.heads:
2728 return
2728 return
2729
2729
2730 if not remote.capable(b'clonebundles'):
2730 if not remote.capable(b'clonebundles'):
2731 return
2731 return
2732
2732
2733 with remote.commandexecutor() as e:
2733 with remote.commandexecutor() as e:
2734 res = e.callcommand(b'clonebundles', {}).result()
2734 res = e.callcommand(b'clonebundles', {}).result()
2735
2735
2736 # If we call the wire protocol command, that's good enough to record the
2736 # If we call the wire protocol command, that's good enough to record the
2737 # attempt.
2737 # attempt.
2738 pullop.clonebundleattempted = True
2738 pullop.clonebundleattempted = True
2739
2739
2740 entries = bundlecaches.parseclonebundlesmanifest(repo, res)
2740 entries = bundlecaches.parseclonebundlesmanifest(repo, res)
2741 if not entries:
2741 if not entries:
2742 repo.ui.note(
2742 repo.ui.note(
2743 _(
2743 _(
2744 b'no clone bundles available on remote; '
2744 b'no clone bundles available on remote; '
2745 b'falling back to regular clone\n'
2745 b'falling back to regular clone\n'
2746 )
2746 )
2747 )
2747 )
2748 return
2748 return
2749
2749
2750 entries = bundlecaches.filterclonebundleentries(
2750 entries = bundlecaches.filterclonebundleentries(
2751 repo, entries, streamclonerequested=pullop.streamclonerequested
2751 repo, entries, streamclonerequested=pullop.streamclonerequested
2752 )
2752 )
2753
2753
2754 if not entries:
2754 if not entries:
2755 # There is a thundering herd concern here. However, if a server
2755 # There is a thundering herd concern here. However, if a server
2756 # operator doesn't advertise bundles appropriate for its clients,
2756 # operator doesn't advertise bundles appropriate for its clients,
2757 # they deserve what's coming. Furthermore, from a client's
2757 # they deserve what's coming. Furthermore, from a client's
2758 # perspective, no automatic fallback would mean not being able to
2758 # perspective, no automatic fallback would mean not being able to
2759 # clone!
2759 # clone!
2760 repo.ui.warn(
2760 repo.ui.warn(
2761 _(
2761 _(
2762 b'no compatible clone bundles available on server; '
2762 b'no compatible clone bundles available on server; '
2763 b'falling back to regular clone\n'
2763 b'falling back to regular clone\n'
2764 )
2764 )
2765 )
2765 )
2766 repo.ui.warn(
2766 repo.ui.warn(
2767 _(b'(you may want to report this to the server operator)\n')
2767 _(b'(you may want to report this to the server operator)\n')
2768 )
2768 )
2769 return
2769 return
2770
2770
2771 entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
2771 entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
2772
2772
2773 url = entries[0][b'URL']
2773 url = entries[0][b'URL']
2774 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2774 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2775 if trypullbundlefromurl(repo.ui, repo, url):
2775 if trypullbundlefromurl(repo.ui, repo, url):
2776 repo.ui.status(_(b'finished applying clone bundle\n'))
2776 repo.ui.status(_(b'finished applying clone bundle\n'))
2777 # Bundle failed.
2777 # Bundle failed.
2778 #
2778 #
2779 # We abort by default to avoid the thundering herd of
2779 # We abort by default to avoid the thundering herd of
2780 # clients flooding a server that was expecting expensive
2780 # clients flooding a server that was expecting expensive
2781 # clone load to be offloaded.
2781 # clone load to be offloaded.
2782 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2782 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2783 repo.ui.warn(_(b'falling back to normal clone\n'))
2783 repo.ui.warn(_(b'falling back to normal clone\n'))
2784 else:
2784 else:
2785 raise error.Abort(
2785 raise error.Abort(
2786 _(b'error applying bundle'),
2786 _(b'error applying bundle'),
2787 hint=_(
2787 hint=_(
2788 b'if this error persists, consider contacting '
2788 b'if this error persists, consider contacting '
2789 b'the server operator or disable clone '
2789 b'the server operator or disable clone '
2790 b'bundles via '
2790 b'bundles via '
2791 b'"--config ui.clonebundles=false"'
2791 b'"--config ui.clonebundles=false"'
2792 ),
2792 ),
2793 )
2793 )
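For reference, the clone-bundles manifest fetched above is line oriented: each non-empty line carries a bundle URL, optionally followed by space-separated KEY=VALUE attributes such as BUNDLESPEC. The parser below is a simplified sketch under that assumption; it is not bundlecaches.parseclonebundlesmanifest, which performs additional decoding and validation.

def parse_manifest_lines(data):
    # Each non-empty line: "<URL> [KEY=VALUE ...]"; unknown keys are kept
    # verbatim so later filtering/sorting steps can inspect them.
    entries = []
    for line in data.splitlines():
        fields = line.split()
        if not fields:
            continue
        entry = {b'URL': fields[0]}
        for attr in fields[1:]:
            key, _, value = attr.partition(b'=')
            entry[key] = value
        entries.append(entry)
    return entries

A manifest advertising a single gzip bundle might contain a line such as "https://example.com/repo.hg BUNDLESPEC=gzip-v2" (hypothetical URL); filterclonebundleentries and sortclonebundleentries then narrow such a list down to the single entry applied above.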
2794
2794
2795
2795
2796 def trypullbundlefromurl(ui, repo, url):
2796 def trypullbundlefromurl(ui, repo, url):
2797 """Attempt to apply a bundle from a URL."""
2797 """Attempt to apply a bundle from a URL."""
2798 with repo.lock(), repo.transaction(b'bundleurl') as tr:
2798 with repo.lock(), repo.transaction(b'bundleurl') as tr:
2799 try:
2799 try:
2800 fh = urlmod.open(ui, url)
2800 fh = urlmod.open(ui, url)
2801 cg = readbundle(ui, fh, b'stream')
2801 cg = readbundle(ui, fh, b'stream')
2802
2802
2803 if isinstance(cg, streamclone.streamcloneapplier):
2803 if isinstance(cg, streamclone.streamcloneapplier):
2804 cg.apply(repo)
2804 cg.apply(repo)
2805 else:
2805 else:
2806 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
2806 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
2807 return True
2807 return True
2808 except urlerr.httperror as e:
2808 except urlerr.httperror as e:
2809 ui.warn(
2809 ui.warn(
2810 _(b'HTTP error fetching bundle: %s\n')
2810 _(b'HTTP error fetching bundle: %s\n')
2811 % stringutil.forcebytestr(e)
2811 % stringutil.forcebytestr(e)
2812 )
2812 )
2813 except urlerr.urlerror as e:
2813 except urlerr.urlerror as e:
2814 ui.warn(
2814 ui.warn(
2815 _(b'error fetching bundle: %s\n')
2815 _(b'error fetching bundle: %s\n')
2816 % stringutil.forcebytestr(e.reason)
2816 % stringutil.forcebytestr(e.reason)
2817 )
2817 )
2818
2818
2819 return False
2819 return False
@@ -1,1396 +1,1396 b''
1 #testcases b2-pushkey b2-binary
1 #testcases b2-pushkey b2-binary
2
2
3 #if b2-pushkey
3 #if b2-pushkey
4 $ cat << EOF >> $HGRCPATH
4 $ cat << EOF >> $HGRCPATH
5 > [devel]
5 > [devel]
6 > legacy.exchange=bookmarks
6 > legacy.exchange=bookmarks
7 > EOF
7 > EOF
8 #endif
8 #endif
9
9
10 #require serve
10 #require serve
11
11
12 $ cat << EOF >> $HGRCPATH
12 $ cat << EOF >> $HGRCPATH
13 > [command-templates]
13 > [command-templates]
14 > log={rev}:{node|short} {desc|firstline}
14 > log={rev}:{node|short} {desc|firstline}
15 > [phases]
15 > [phases]
16 > publish=False
16 > publish=False
17 > [experimental]
17 > [experimental]
18 > evolution.createmarkers=True
18 > evolution.createmarkers=True
19 > evolution.exchange=True
19 > evolution.exchange=True
20 > EOF
20 > EOF
21
21
22 $ cat > $TESTTMP/hook.sh <<'EOF'
22 $ cat > $TESTTMP/hook.sh <<'EOF'
23 > echo "test-hook-bookmark: $HG_BOOKMARK: $HG_OLDNODE -> $HG_NODE"
23 > echo "test-hook-bookmark: $HG_BOOKMARK: $HG_OLDNODE -> $HG_NODE"
24 > EOF
24 > EOF
25 $ TESTHOOK="hooks.txnclose-bookmark.test=sh $TESTTMP/hook.sh"
25 $ TESTHOOK="hooks.txnclose-bookmark.test=sh $TESTTMP/hook.sh"
26
26
27 initialize
27 initialize
28
28
29 $ hg init a
29 $ hg init a
30 $ cd a
30 $ cd a
31 $ echo 'test' > test
31 $ echo 'test' > test
32 $ hg commit -Am'test'
32 $ hg commit -Am'test'
33 adding test
33 adding test
34
34
35 set bookmarks
35 set bookmarks
36
36
37 $ hg bookmark X
37 $ hg bookmark X
38 $ hg bookmark Y
38 $ hg bookmark Y
39 $ hg bookmark Z
39 $ hg bookmark Z
40
40
41 import bookmark by name
41 import bookmark by name
42
42
43 $ hg init ../b
43 $ hg init ../b
44 $ cd ../b
44 $ cd ../b
45 $ hg book Y
45 $ hg book Y
46 $ hg book
46 $ hg book
47 * Y -1:000000000000
47 * Y -1:000000000000
48 $ hg pull ../a --config "$TESTHOOK"
48 $ hg pull ../a --config "$TESTHOOK"
49 pulling from ../a
49 pulling from ../a
50 requesting all changes
50 requesting all changes
51 adding changesets
51 adding changesets
52 adding manifests
52 adding manifests
53 adding file changes
53 adding file changes
54 adding remote bookmark X
54 adding remote bookmark X
55 updating bookmark Y
55 updating bookmark Y
56 adding remote bookmark Z
56 adding remote bookmark Z
57 added 1 changesets with 1 changes to 1 files
57 added 1 changesets with 1 changes to 1 files
58 new changesets 4e3505fd9583 (1 drafts)
58 new changesets 4e3505fd9583 (1 drafts)
59 test-hook-bookmark: X: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
59 test-hook-bookmark: X: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
60 test-hook-bookmark: Y: 0000000000000000000000000000000000000000 -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
60 test-hook-bookmark: Y: 0000000000000000000000000000000000000000 -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
61 test-hook-bookmark: Z: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
61 test-hook-bookmark: Z: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
62 (run 'hg update' to get a working copy)
62 (run 'hg update' to get a working copy)
63 $ hg bookmarks
63 $ hg bookmarks
64 X 0:4e3505fd9583
64 X 0:4e3505fd9583
65 * Y 0:4e3505fd9583
65 * Y 0:4e3505fd9583
66 Z 0:4e3505fd9583
66 Z 0:4e3505fd9583
67 $ hg debugpushkey ../a namespaces
67 $ hg debugpushkey ../a namespaces
68 bookmarks
68 bookmarks
69 namespaces
69 namespaces
70 obsolete
70 obsolete
71 phases
71 phases
72 $ hg debugpushkey ../a bookmarks
72 $ hg debugpushkey ../a bookmarks
73 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
73 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
74 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
74 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
75 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
75 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
76
76
77 delete the bookmark to re-pull it
77 delete the bookmark to re-pull it
78
78
79 $ hg book -d X
79 $ hg book -d X
80 $ hg pull -B X ../a
80 $ hg pull -B X ../a
81 pulling from ../a
81 pulling from ../a
82 no changes found
82 no changes found
83 adding remote bookmark X
83 adding remote bookmark X
84
84
85 finally no-op pull
85 finally no-op pull
86
86
87 $ hg pull -B X ../a
87 $ hg pull -B X ../a
88 pulling from ../a
88 pulling from ../a
89 no changes found
89 no changes found
90 $ hg bookmark
90 $ hg bookmark
91 X 0:4e3505fd9583
91 X 0:4e3505fd9583
92 * Y 0:4e3505fd9583
92 * Y 0:4e3505fd9583
93 Z 0:4e3505fd9583
93 Z 0:4e3505fd9583
94
94
95 export bookmark by name
95 export bookmark by name
96
96
97 $ hg bookmark W
97 $ hg bookmark W
98 $ hg bookmark foo
98 $ hg bookmark foo
99 $ hg bookmark foobar
99 $ hg bookmark foobar
100 $ hg push -B W ../a
100 $ hg push -B W ../a
101 pushing to ../a
101 pushing to ../a
102 searching for changes
102 searching for changes
103 no changes found
103 no changes found
104 exporting bookmark W
104 exporting bookmark W
105 [1]
105 [1]
106 $ hg -R ../a bookmarks
106 $ hg -R ../a bookmarks
107 W -1:000000000000
107 W -1:000000000000
108 X 0:4e3505fd9583
108 X 0:4e3505fd9583
109 Y 0:4e3505fd9583
109 Y 0:4e3505fd9583
110 * Z 0:4e3505fd9583
110 * Z 0:4e3505fd9583
111
111
112 delete a remote bookmark
112 delete a remote bookmark
113
113
114 $ hg book -d W
114 $ hg book -d W
115
115
116 #if b2-pushkey
116 #if b2-pushkey
117
117
118 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
118 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
119 pushing to ../a
119 pushing to ../a
120 query 1; heads
120 query 1; heads
121 searching for changes
121 searching for changes
122 all remote heads known locally
122 all remote heads known locally
123 listing keys for "phases"
123 listing keys for "phases"
124 checking for updated bookmarks
124 checking for updated bookmarks
125 listing keys for "bookmarks"
125 listing keys for "bookmarks"
126 no changes found
126 no changes found
127 bundle2-output-bundle: "HG20", 4 parts total
127 bundle2-output-bundle: "HG20", 4 parts total
128 bundle2-output: start emission of HG20 stream
128 bundle2-output: start emission of HG20 stream
129 bundle2-output: bundle parameter:
129 bundle2-output: bundle parameter:
130 bundle2-output: start of parts
130 bundle2-output: start of parts
131 bundle2-output: bundle part: "replycaps"
131 bundle2-output: bundle part: "replycaps"
132 bundle2-output-part: "replycaps" 224 bytes payload
132 bundle2-output-part: "replycaps" 224 bytes payload
133 bundle2-output: part 0: "REPLYCAPS"
133 bundle2-output: part 0: "REPLYCAPS"
134 bundle2-output: header chunk size: 16
134 bundle2-output: header chunk size: 16
135 bundle2-output: payload chunk size: 224
135 bundle2-output: payload chunk size: 224
136 bundle2-output: closing payload chunk
136 bundle2-output: closing payload chunk
137 bundle2-output: bundle part: "check:bookmarks"
137 bundle2-output: bundle part: "check:bookmarks"
138 bundle2-output-part: "check:bookmarks" 23 bytes payload
138 bundle2-output-part: "check:bookmarks" 23 bytes payload
139 bundle2-output: part 1: "CHECK:BOOKMARKS"
139 bundle2-output: part 1: "CHECK:BOOKMARKS"
140 bundle2-output: header chunk size: 22
140 bundle2-output: header chunk size: 22
141 bundle2-output: payload chunk size: 23
141 bundle2-output: payload chunk size: 23
142 bundle2-output: closing payload chunk
142 bundle2-output: closing payload chunk
143 bundle2-output: bundle part: "check:phases"
143 bundle2-output: bundle part: "check:phases"
144 bundle2-output-part: "check:phases" 24 bytes payload
144 bundle2-output-part: "check:phases" 24 bytes payload
145 bundle2-output: part 2: "CHECK:PHASES"
145 bundle2-output: part 2: "CHECK:PHASES"
146 bundle2-output: header chunk size: 19
146 bundle2-output: header chunk size: 19
147 bundle2-output: payload chunk size: 24
147 bundle2-output: payload chunk size: 24
148 bundle2-output: closing payload chunk
148 bundle2-output: closing payload chunk
149 bundle2-output: bundle part: "pushkey"
149 bundle2-output: bundle part: "pushkey"
150 bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
150 bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
151 bundle2-output: part 3: "PUSHKEY"
151 bundle2-output: part 3: "PUSHKEY"
152 bundle2-output: header chunk size: 90
152 bundle2-output: header chunk size: 90
153 bundle2-output: closing payload chunk
153 bundle2-output: closing payload chunk
154 bundle2-output: end of bundle
154 bundle2-output: end of bundle
155 bundle2-input: start processing of HG20 stream
155 bundle2-input: start processing of HG20 stream
156 bundle2-input: reading bundle2 stream parameters
156 bundle2-input: reading bundle2 stream parameters
157 bundle2-input-bundle: with-transaction
157 bundle2-input-bundle: with-transaction
158 bundle2-input: start extraction of bundle2 parts
158 bundle2-input: start extraction of bundle2 parts
159 bundle2-input: part header size: 16
159 bundle2-input: part header size: 16
160 bundle2-input: part type: "REPLYCAPS"
160 bundle2-input: part type: "REPLYCAPS"
161 bundle2-input: part id: "0"
161 bundle2-input: part id: "0"
162 bundle2-input: part parameters: 0
162 bundle2-input: part parameters: 0
163 bundle2-input: found a handler for part replycaps
163 bundle2-input: found a handler for part replycaps
164 bundle2-input-part: "replycaps" supported
164 bundle2-input-part: "replycaps" supported
165 bundle2-input: payload chunk size: 224
165 bundle2-input: payload chunk size: 224
166 bundle2-input: payload chunk size: 0
166 bundle2-input: payload chunk size: 0
167 bundle2-input-part: total payload size 224
167 bundle2-input-part: total payload size 224
168 bundle2-input: part header size: 22
168 bundle2-input: part header size: 22
169 bundle2-input: part type: "CHECK:BOOKMARKS"
169 bundle2-input: part type: "CHECK:BOOKMARKS"
170 bundle2-input: part id: "1"
170 bundle2-input: part id: "1"
171 bundle2-input: part parameters: 0
171 bundle2-input: part parameters: 0
172 bundle2-input: found a handler for part check:bookmarks
172 bundle2-input: found a handler for part check:bookmarks
173 bundle2-input-part: "check:bookmarks" supported
173 bundle2-input-part: "check:bookmarks" supported
174 bundle2-input: payload chunk size: 23
174 bundle2-input: payload chunk size: 23
175 bundle2-input: payload chunk size: 0
175 bundle2-input: payload chunk size: 0
176 bundle2-input-part: total payload size 23
176 bundle2-input-part: total payload size 23
177 bundle2-input: part header size: 19
177 bundle2-input: part header size: 19
178 bundle2-input: part type: "CHECK:PHASES"
178 bundle2-input: part type: "CHECK:PHASES"
179 bundle2-input: part id: "2"
179 bundle2-input: part id: "2"
180 bundle2-input: part parameters: 0
180 bundle2-input: part parameters: 0
181 bundle2-input: found a handler for part check:phases
181 bundle2-input: found a handler for part check:phases
182 bundle2-input-part: "check:phases" supported
182 bundle2-input-part: "check:phases" supported
183 bundle2-input: payload chunk size: 24
183 bundle2-input: payload chunk size: 24
184 bundle2-input: payload chunk size: 0
184 bundle2-input: payload chunk size: 0
185 bundle2-input-part: total payload size 24
185 bundle2-input-part: total payload size 24
186 bundle2-input: part header size: 90
186 bundle2-input: part header size: 90
187 bundle2-input: part type: "PUSHKEY"
187 bundle2-input: part type: "PUSHKEY"
188 bundle2-input: part id: "3"
188 bundle2-input: part id: "3"
189 bundle2-input: part parameters: 4
189 bundle2-input: part parameters: 4
190 bundle2-input: found a handler for part pushkey
190 bundle2-input: found a handler for part pushkey
191 bundle2-input-part: "pushkey" (params: 4 mandatory) supported
191 bundle2-input-part: "pushkey" (params: 4 mandatory) supported
192 pushing key for "bookmarks:W"
192 pushing key for "bookmarks:W"
193 bundle2-input: payload chunk size: 0
193 bundle2-input: payload chunk size: 0
194 bundle2-input: part header size: 0
194 bundle2-input: part header size: 0
195 bundle2-input: end of bundle2 stream
195 bundle2-input: end of bundle2 stream
196 bundle2-input-bundle: 4 parts total
196 bundle2-input-bundle: 4 parts total
197 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
197 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
198 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
198 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
199 bundle2-output-bundle: "HG20", 1 parts total
199 bundle2-output-bundle: "HG20", 1 parts total
200 bundle2-output: start emission of HG20 stream
200 bundle2-output: start emission of HG20 stream
201 bundle2-output: bundle parameter:
201 bundle2-output: bundle parameter:
202 bundle2-output: start of parts
202 bundle2-output: start of parts
203 bundle2-output: bundle part: "reply:pushkey"
203 bundle2-output: bundle part: "reply:pushkey"
204 bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
204 bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
205 bundle2-output: part 0: "REPLY:PUSHKEY"
205 bundle2-output: part 0: "REPLY:PUSHKEY"
206 bundle2-output: header chunk size: 43
206 bundle2-output: header chunk size: 43
207 bundle2-output: closing payload chunk
207 bundle2-output: closing payload chunk
208 bundle2-output: end of bundle
208 bundle2-output: end of bundle
209 bundle2-input: start processing of HG20 stream
209 bundle2-input: start processing of HG20 stream
210 bundle2-input: reading bundle2 stream parameters
210 bundle2-input: reading bundle2 stream parameters
211 bundle2-input-bundle: no-transaction
211 bundle2-input-bundle: no-transaction
212 bundle2-input: start extraction of bundle2 parts
212 bundle2-input: start extraction of bundle2 parts
213 bundle2-input: part header size: 43
213 bundle2-input: part header size: 43
214 bundle2-input: part type: "REPLY:PUSHKEY"
214 bundle2-input: part type: "REPLY:PUSHKEY"
215 bundle2-input: part id: "0"
215 bundle2-input: part id: "0"
216 bundle2-input: part parameters: 2
216 bundle2-input: part parameters: 2
217 bundle2-input: found a handler for part reply:pushkey
217 bundle2-input: found a handler for part reply:pushkey
218 bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
218 bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
219 bundle2-input: payload chunk size: 0
219 bundle2-input: payload chunk size: 0
220 bundle2-input: part header size: 0
220 bundle2-input: part header size: 0
221 bundle2-input: end of bundle2 stream
221 bundle2-input: end of bundle2 stream
222 bundle2-input-bundle: 1 parts total
222 bundle2-input-bundle: 1 parts total
223 deleting remote bookmark W
223 deleting remote bookmark W
224 listing keys for "phases"
224 listing keys for "phases"
225 [1]
225 [1]
226
226
227 #endif
227 #endif
228 #if b2-binary
228 #if b2-binary
229
229
230 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
230 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
231 pushing to ../a
231 pushing to ../a
232 query 1; heads
232 query 1; heads
233 searching for changes
233 searching for changes
234 all remote heads known locally
234 all remote heads known locally
235 listing keys for "phases"
235 listing keys for "phases"
236 checking for updated bookmarks
236 checking for updated bookmarks
237 listing keys for "bookmarks"
237 listing keys for "bookmarks"
238 no changes found
238 no changes found
239 bundle2-output-bundle: "HG20", 4 parts total
239 bundle2-output-bundle: "HG20", 4 parts total
240 bundle2-output: start emission of HG20 stream
240 bundle2-output: start emission of HG20 stream
241 bundle2-output: bundle parameter:
241 bundle2-output: bundle parameter:
242 bundle2-output: start of parts
242 bundle2-output: start of parts
243 bundle2-output: bundle part: "replycaps"
243 bundle2-output: bundle part: "replycaps"
244 bundle2-output-part: "replycaps" 224 bytes payload
244 bundle2-output-part: "replycaps" 224 bytes payload
245 bundle2-output: part 0: "REPLYCAPS"
245 bundle2-output: part 0: "REPLYCAPS"
246 bundle2-output: header chunk size: 16
246 bundle2-output: header chunk size: 16
247 bundle2-output: payload chunk size: 224
247 bundle2-output: payload chunk size: 224
248 bundle2-output: closing payload chunk
248 bundle2-output: closing payload chunk
249 bundle2-output: bundle part: "check:bookmarks"
249 bundle2-output: bundle part: "check:bookmarks"
250 bundle2-output-part: "check:bookmarks" 23 bytes payload
250 bundle2-output-part: "check:bookmarks" 23 bytes payload
251 bundle2-output: part 1: "CHECK:BOOKMARKS"
251 bundle2-output: part 1: "CHECK:BOOKMARKS"
252 bundle2-output: header chunk size: 22
252 bundle2-output: header chunk size: 22
253 bundle2-output: payload chunk size: 23
253 bundle2-output: payload chunk size: 23
254 bundle2-output: closing payload chunk
254 bundle2-output: closing payload chunk
255 bundle2-output: bundle part: "check:phases"
255 bundle2-output: bundle part: "check:phases"
256 bundle2-output-part: "check:phases" 24 bytes payload
256 bundle2-output-part: "check:phases" 24 bytes payload
257 bundle2-output: part 2: "CHECK:PHASES"
257 bundle2-output: part 2: "CHECK:PHASES"
258 bundle2-output: header chunk size: 19
258 bundle2-output: header chunk size: 19
259 bundle2-output: payload chunk size: 24
259 bundle2-output: payload chunk size: 24
260 bundle2-output: closing payload chunk
260 bundle2-output: closing payload chunk
261 bundle2-output: bundle part: "bookmarks"
261 bundle2-output: bundle part: "bookmarks"
262 bundle2-output-part: "bookmarks" 23 bytes payload
262 bundle2-output-part: "bookmarks" 23 bytes payload
263 bundle2-output: part 3: "BOOKMARKS"
263 bundle2-output: part 3: "BOOKMARKS"
264 bundle2-output: header chunk size: 16
264 bundle2-output: header chunk size: 16
265 bundle2-output: payload chunk size: 23
265 bundle2-output: payload chunk size: 23
266 bundle2-output: closing payload chunk
266 bundle2-output: closing payload chunk
267 bundle2-output: end of bundle
267 bundle2-output: end of bundle
268 bundle2-input: start processing of HG20 stream
268 bundle2-input: start processing of HG20 stream
269 bundle2-input: reading bundle2 stream parameters
269 bundle2-input: reading bundle2 stream parameters
270 bundle2-input-bundle: with-transaction
270 bundle2-input-bundle: with-transaction
271 bundle2-input: start extraction of bundle2 parts
271 bundle2-input: start extraction of bundle2 parts
272 bundle2-input: part header size: 16
272 bundle2-input: part header size: 16
273 bundle2-input: part type: "REPLYCAPS"
273 bundle2-input: part type: "REPLYCAPS"
274 bundle2-input: part id: "0"
274 bundle2-input: part id: "0"
275 bundle2-input: part parameters: 0
275 bundle2-input: part parameters: 0
276 bundle2-input: found a handler for part replycaps
276 bundle2-input: found a handler for part replycaps
277 bundle2-input-part: "replycaps" supported
277 bundle2-input-part: "replycaps" supported
278 bundle2-input: payload chunk size: 224
278 bundle2-input: payload chunk size: 224
279 bundle2-input: payload chunk size: 0
279 bundle2-input: payload chunk size: 0
280 bundle2-input-part: total payload size 224
280 bundle2-input-part: total payload size 224
281 bundle2-input: part header size: 22
281 bundle2-input: part header size: 22
282 bundle2-input: part type: "CHECK:BOOKMARKS"
282 bundle2-input: part type: "CHECK:BOOKMARKS"
283 bundle2-input: part id: "1"
283 bundle2-input: part id: "1"
284 bundle2-input: part parameters: 0
284 bundle2-input: part parameters: 0
285 bundle2-input: found a handler for part check:bookmarks
285 bundle2-input: found a handler for part check:bookmarks
286 bundle2-input-part: "check:bookmarks" supported
286 bundle2-input-part: "check:bookmarks" supported
287 bundle2-input: payload chunk size: 23
287 bundle2-input: payload chunk size: 23
288 bundle2-input: payload chunk size: 0
288 bundle2-input: payload chunk size: 0
289 bundle2-input-part: total payload size 23
289 bundle2-input-part: total payload size 23
290 bundle2-input: part header size: 19
290 bundle2-input: part header size: 19
291 bundle2-input: part type: "CHECK:PHASES"
291 bundle2-input: part type: "CHECK:PHASES"
292 bundle2-input: part id: "2"
292 bundle2-input: part id: "2"
293 bundle2-input: part parameters: 0
293 bundle2-input: part parameters: 0
294 bundle2-input: found a handler for part check:phases
294 bundle2-input: found a handler for part check:phases
295 bundle2-input-part: "check:phases" supported
295 bundle2-input-part: "check:phases" supported
296 bundle2-input: payload chunk size: 24
296 bundle2-input: payload chunk size: 24
297 bundle2-input: payload chunk size: 0
297 bundle2-input: payload chunk size: 0
298 bundle2-input-part: total payload size 24
298 bundle2-input-part: total payload size 24
299 bundle2-input: part header size: 16
299 bundle2-input: part header size: 16
300 bundle2-input: part type: "BOOKMARKS"
300 bundle2-input: part type: "BOOKMARKS"
301 bundle2-input: part id: "3"
301 bundle2-input: part id: "3"
302 bundle2-input: part parameters: 0
302 bundle2-input: part parameters: 0
303 bundle2-input: found a handler for part bookmarks
303 bundle2-input: found a handler for part bookmarks
304 bundle2-input-part: "bookmarks" supported
304 bundle2-input-part: "bookmarks" supported
305 bundle2-input: payload chunk size: 23
305 bundle2-input: payload chunk size: 23
306 bundle2-input: payload chunk size: 0
306 bundle2-input: payload chunk size: 0
307 bundle2-input-part: total payload size 23
307 bundle2-input-part: total payload size 23
308 bundle2-input: part header size: 0
308 bundle2-input: part header size: 0
309 bundle2-input: end of bundle2 stream
309 bundle2-input: end of bundle2 stream
310 bundle2-input-bundle: 4 parts total
310 bundle2-input-bundle: 4 parts total
311 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
311 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
312 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
312 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
313 bundle2-output-bundle: "HG20", 0 parts total
313 bundle2-output-bundle: "HG20", 0 parts total
314 bundle2-output: start emission of HG20 stream
314 bundle2-output: start emission of HG20 stream
315 bundle2-output: bundle parameter:
315 bundle2-output: bundle parameter:
316 bundle2-output: start of parts
316 bundle2-output: start of parts
317 bundle2-output: end of bundle
317 bundle2-output: end of bundle
318 bundle2-input: start processing of HG20 stream
318 bundle2-input: start processing of HG20 stream
319 bundle2-input: reading bundle2 stream parameters
319 bundle2-input: reading bundle2 stream parameters
320 bundle2-input-bundle: no-transaction
320 bundle2-input-bundle: no-transaction
321 bundle2-input: start extraction of bundle2 parts
321 bundle2-input: start extraction of bundle2 parts
322 bundle2-input: part header size: 0
322 bundle2-input: part header size: 0
323 bundle2-input: end of bundle2 stream
323 bundle2-input: end of bundle2 stream
324 bundle2-input-bundle: 0 parts total
324 bundle2-input-bundle: 0 parts total
325 deleting remote bookmark W
325 deleting remote bookmark W
326 listing keys for "phases"
326 listing keys for "phases"
327 [1]
327 [1]
328
328
329 #endif
329 #endif
330
330
331 Divergent bookmark cannot be exported
331 Divergent bookmark cannot be exported
332
332
333 $ hg book W@default
333 $ hg book W@default
334 $ hg push -B W@default ../a
334 $ hg push -B W@default ../a
335 pushing to ../a
335 pushing to ../a
336 searching for changes
336 searching for changes
337 cannot push divergent bookmark W@default!
337 cannot push divergent bookmark W@default!
338 no changes found
338 no changes found
339 [2]
339 [2]
340 $ hg book -d W@default
340 $ hg book -d W@default
341
341
342 export the active bookmark
342 export the active bookmark
343
343
344 $ hg bookmark V
344 $ hg bookmark V
345 $ hg push -B . ../a
345 $ hg push -B . ../a
346 pushing to ../a
346 pushing to ../a
347 searching for changes
347 searching for changes
348 no changes found
348 no changes found
349 exporting bookmark V
349 exporting bookmark V
350 [1]
350 [1]
351
351
352 exporting the active bookmark with 'push -B .'
352 exporting the active bookmark with 'push -B .'
353 demands that a bookmark be active
353 demands that a bookmark be active
354
354
355 $ hg update -r default
355 $ hg update -r default
356 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
356 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
357 (leaving bookmark V)
357 (leaving bookmark V)
358 $ hg push -B . ../a
358 $ hg push -B . ../a
359 abort: no active bookmark
359 abort: no active bookmark
360 [255]
360 [255]
361 $ hg update -r V
361 $ hg update -r V
362 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
362 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
363 (activating bookmark V)
363 (activating bookmark V)
364
364
365 delete the bookmark
365 delete the bookmark
366
366
367 $ hg book -d V
367 $ hg book -d V
368 $ hg push -B V ../a
368 $ hg push -B V ../a
369 pushing to ../a
369 pushing to ../a
370 searching for changes
370 searching for changes
371 no changes found
371 no changes found
372 deleting remote bookmark V
372 deleting remote bookmark V
373 [1]
373 [1]
374 $ hg up foobar
374 $ hg up foobar
375 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
375 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
376 (activating bookmark foobar)
376 (activating bookmark foobar)
377
377
378 push/pull name that doesn't exist
378 push/pull name that doesn't exist
379
379
380 $ hg push -B badname ../a
380 $ hg push -B badname ../a
381 pushing to ../a
381 pushing to ../a
382 searching for changes
382 searching for changes
383 bookmark badname does not exist on the local or remote repository!
383 bookmark badname does not exist on the local or remote repository!
384 no changes found
384 no changes found
385 [2]
385 [2]
386 $ hg pull -B anotherbadname ../a
386 $ hg pull -B anotherbadname ../a
387 pulling from ../a
387 pulling from ../a
388 abort: remote bookmark anotherbadname not found!
388 abort: remote bookmark anotherbadname not found!
389 [10]
389 [10]
390
390
391 divergent bookmarks
391 divergent bookmarks
392
392
393 $ cd ../a
393 $ cd ../a
394 $ echo c1 > f1
394 $ echo c1 > f1
395 $ hg ci -Am1
395 $ hg ci -Am1
396 adding f1
396 adding f1
397 $ hg book -f @
397 $ hg book -f @
398 $ hg book -f X
398 $ hg book -f X
399 $ hg book
399 $ hg book
400 @ 1:0d2164f0ce0d
400 @ 1:0d2164f0ce0d
401 * X 1:0d2164f0ce0d
401 * X 1:0d2164f0ce0d
402 Y 0:4e3505fd9583
402 Y 0:4e3505fd9583
403 Z 1:0d2164f0ce0d
403 Z 1:0d2164f0ce0d
404
404
405 $ cd ../b
405 $ cd ../b
406 $ hg up
406 $ hg up
407 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
407 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
408 updating bookmark foobar
408 updating bookmark foobar
409 $ echo c2 > f2
409 $ echo c2 > f2
410 $ hg ci -Am2
410 $ hg ci -Am2
411 adding f2
411 adding f2
412 $ hg book -if @
412 $ hg book -if @
413 $ hg book -if X
413 $ hg book -if X
414 $ hg book
414 $ hg book
415 @ 1:9b140be10808
415 @ 1:9b140be10808
416 X 1:9b140be10808
416 X 1:9b140be10808
417 Y 0:4e3505fd9583
417 Y 0:4e3505fd9583
418 Z 0:4e3505fd9583
418 Z 0:4e3505fd9583
419 foo -1:000000000000
419 foo -1:000000000000
420 * foobar 1:9b140be10808
420 * foobar 1:9b140be10808
421
421
422 $ hg pull --config paths.foo=../a foo --config "$TESTHOOK"
422 $ hg pull --config paths.foo=../a foo --config "$TESTHOOK"
423 pulling from $TESTTMP/a
423 pulling from $TESTTMP/a
424 searching for changes
424 searching for changes
425 adding changesets
425 adding changesets
426 adding manifests
426 adding manifests
427 adding file changes
427 adding file changes
428 divergent bookmark @ stored as @foo
428 divergent bookmark @ stored as @foo
429 divergent bookmark X stored as X@foo
429 divergent bookmark X stored as X@foo
430 updating bookmark Z
430 updating bookmark Z
431 added 1 changesets with 1 changes to 1 files (+1 heads)
431 added 1 changesets with 1 changes to 1 files (+1 heads)
432 new changesets 0d2164f0ce0d (1 drafts)
432 new changesets 0d2164f0ce0d (1 drafts)
433 test-hook-bookmark: @foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
433 test-hook-bookmark: @foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
434 test-hook-bookmark: X@foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
434 test-hook-bookmark: X@foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
435 test-hook-bookmark: Z: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
435 test-hook-bookmark: Z: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
436 (run 'hg heads' to see heads, 'hg merge' to merge)
436 (run 'hg heads' to see heads, 'hg merge' to merge)
437 $ hg book
437 $ hg book
438 @ 1:9b140be10808
438 @ 1:9b140be10808
439 @foo 2:0d2164f0ce0d
439 @foo 2:0d2164f0ce0d
440 X 1:9b140be10808
440 X 1:9b140be10808
441 X@foo 2:0d2164f0ce0d
441 X@foo 2:0d2164f0ce0d
442 Y 0:4e3505fd9583
442 Y 0:4e3505fd9583
443 Z 2:0d2164f0ce0d
443 Z 2:0d2164f0ce0d
444 foo -1:000000000000
444 foo -1:000000000000
445 * foobar 1:9b140be10808
445 * foobar 1:9b140be10808
446
446
447 (test that a bookmark with too many divergent copies is handled)
447 (test that a bookmark with too many divergent copies is handled)
448
448
449 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
449 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
450 $ hg pull ../a
450 $ hg pull ../a
451 pulling from ../a
451 pulling from ../a
452 searching for changes
452 searching for changes
453 no changes found
453 no changes found
454 warning: failed to assign numbered name to divergent bookmark X
454 warning: failed to assign numbered name to divergent bookmark X
455 divergent bookmark @ stored as @1
455 divergent bookmark @ stored as @1
456 $ hg bookmarks | grep '^ X' | grep -v ':000000000000'
456 $ hg bookmarks | grep '^ X' | grep -v ':000000000000'
457 X 1:9b140be10808
457 X 1:9b140be10808
458 X@foo 2:0d2164f0ce0d
458 X@foo 2:0d2164f0ce0d
459
459
460 (test that remotely diverged bookmarks are reused if they aren't changed)
460 (test that remotely diverged bookmarks are reused if they aren't changed)
461
461
462 $ hg bookmarks | grep '^ @'
462 $ hg bookmarks | grep '^ @'
463 @ 1:9b140be10808
463 @ 1:9b140be10808
464 @1 2:0d2164f0ce0d
464 @1 2:0d2164f0ce0d
465 @foo 2:0d2164f0ce0d
465 @foo 2:0d2164f0ce0d
466 $ hg pull ../a
466 $ hg pull ../a
467 pulling from ../a
467 pulling from ../a
468 searching for changes
468 searching for changes
469 no changes found
469 no changes found
470 warning: failed to assign numbered name to divergent bookmark X
470 warning: failed to assign numbered name to divergent bookmark X
471 divergent bookmark @ stored as @1
471 divergent bookmark @ stored as @1
472 $ hg bookmarks | grep '^ @'
472 $ hg bookmarks | grep '^ @'
473 @ 1:9b140be10808
473 @ 1:9b140be10808
474 @1 2:0d2164f0ce0d
474 @1 2:0d2164f0ce0d
475 @foo 2:0d2164f0ce0d
475 @foo 2:0d2164f0ce0d
476
476
477 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
477 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
478 $ hg bookmarks -d "@1"
478 $ hg bookmarks -d "@1"
479
479
480 $ hg push -f ../a
480 $ hg push -f ../a
481 pushing to ../a
481 pushing to ../a
482 searching for changes
482 searching for changes
483 adding changesets
483 adding changesets
484 adding manifests
484 adding manifests
485 adding file changes
485 adding file changes
486 added 1 changesets with 1 changes to 1 files (+1 heads)
486 added 1 changesets with 1 changes to 1 files (+1 heads)
487 $ hg -R ../a book
487 $ hg -R ../a book
488 @ 1:0d2164f0ce0d
488 @ 1:0d2164f0ce0d
489 * X 1:0d2164f0ce0d
489 * X 1:0d2164f0ce0d
490 Y 0:4e3505fd9583
490 Y 0:4e3505fd9583
491 Z 1:0d2164f0ce0d
491 Z 1:0d2164f0ce0d
492
492
493 explicit pull should overwrite the local version (issue4439)
493 explicit pull should overwrite the local version (issue4439)
494
494
495 $ hg update -r X
495 $ hg update -r X
496 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
496 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
497 (activating bookmark X)
497 (activating bookmark X)
498 $ hg pull --config paths.foo=../a foo -B . --config "$TESTHOOK"
498 $ hg pull --config paths.foo=../a foo -B . --config "$TESTHOOK"
499 pulling from $TESTTMP/a
499 pulling from $TESTTMP/a
500 no changes found
500 no changes found
501 divergent bookmark @ stored as @foo
501 divergent bookmark @ stored as @foo
502 importing bookmark X
502 importing bookmark X
503 test-hook-bookmark: @foo: 0d2164f0ce0d8f1d6f94351eba04b794909be66c -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
503 test-hook-bookmark: @foo: 0d2164f0ce0d8f1d6f94351eba04b794909be66c -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
504 test-hook-bookmark: X: 9b140be1080824d768c5a4691a564088eede71f9 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
504 test-hook-bookmark: X: 9b140be1080824d768c5a4691a564088eede71f9 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
505
505
506 reinstall state for further testing:
506 reinstall state for further testing:
507
507
508 $ hg book -fr 9b140be10808 X
508 $ hg book -fr 9b140be10808 X
509
509
510 revsets should not ignore divergent bookmarks
510 revsets should not ignore divergent bookmarks
511
511
512 $ hg bookmark -fr 1 Z
512 $ hg bookmark -fr 1 Z
513 $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n'
513 $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n'
514 0:4e3505fd9583 Y
514 0:4e3505fd9583 Y
515 1:9b140be10808 @ X Z foobar
515 1:9b140be10808 @ X Z foobar
516 2:0d2164f0ce0d @foo X@foo
516 2:0d2164f0ce0d @foo X@foo
517 $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
517 $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
518 2:0d2164f0ce0d @foo X@foo
518 2:0d2164f0ce0d @foo X@foo
519 $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
519 $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
520 2:0d2164f0ce0d @foo X@foo
520 2:0d2164f0ce0d @foo X@foo
521
521
522 update a remote bookmark from a non-head to a head
522 update a remote bookmark from a non-head to a head
523
523
524 $ hg up -q Y
524 $ hg up -q Y
525 $ echo c3 > f2
525 $ echo c3 > f2
526 $ hg ci -Am3
526 $ hg ci -Am3
527 adding f2
527 adding f2
528 created new head
528 created new head
529 $ hg push ../a --config "$TESTHOOK"
529 $ hg push ../a --config "$TESTHOOK"
530 pushing to ../a
530 pushing to ../a
531 searching for changes
531 searching for changes
532 adding changesets
532 adding changesets
533 adding manifests
533 adding manifests
534 adding file changes
534 adding file changes
535 added 1 changesets with 1 changes to 1 files (+1 heads)
535 added 1 changesets with 1 changes to 1 files (+1 heads)
536 test-hook-bookmark: Y: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
536 test-hook-bookmark: Y: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
537 updating bookmark Y
537 updating bookmark Y
538 $ hg -R ../a book
538 $ hg -R ../a book
539 @ 1:0d2164f0ce0d
539 @ 1:0d2164f0ce0d
540 * X 1:0d2164f0ce0d
540 * X 1:0d2164f0ce0d
541 Y 3:f6fc62dde3c0
541 Y 3:f6fc62dde3c0
542 Z 1:0d2164f0ce0d
542 Z 1:0d2164f0ce0d
543
543
544 update a bookmark in the middle of a client pulling changes
544 update a bookmark in the middle of a client pulling changes
545
545
546 $ cd ..
546 $ cd ..
547 $ hg clone -q a pull-race
547 $ hg clone -q a pull-race
548
548
549 We want to use http because it is stateless and therefore more susceptible to
549 We want to use http because it is stateless and therefore more susceptible to
550 race conditions
550 race conditions
551
551
552 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
552 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
553 $ cat pull-race.pid >> $DAEMON_PIDS
553 $ cat pull-race.pid >> $DAEMON_PIDS
554
554
555 $ cat <<EOF > $TESTTMP/out_makecommit.sh
555 $ cat <<EOF > $TESTTMP/out_makecommit.sh
556 > #!/bin/sh
556 > #!/bin/sh
557 > hg ci -Am5
557 > hg ci -Am5
558 > echo committed in pull-race
558 > echo committed in pull-race
559 > EOF
559 > EOF
560
560
561 $ hg clone -q http://localhost:$HGPORT/ pull-race2 --config "$TESTHOOK"
561 $ hg clone -q http://localhost:$HGPORT/ pull-race2 --config "$TESTHOOK"
562 test-hook-bookmark: @: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
562 test-hook-bookmark: @: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
563 test-hook-bookmark: X: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
563 test-hook-bookmark: X: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
564 test-hook-bookmark: Y: -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
564 test-hook-bookmark: Y: -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
565 test-hook-bookmark: Z: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
565 test-hook-bookmark: Z: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
566 $ cd pull-race
566 $ cd pull-race
567 $ hg up -q Y
567 $ hg up -q Y
568 $ echo c4 > f2
568 $ echo c4 > f2
569 $ hg ci -Am4
569 $ hg ci -Am4
570 $ echo c5 > f3
570 $ echo c5 > f3
571 $ cat <<EOF > .hg/hgrc
571 $ cat <<EOF > .hg/hgrc
572 > [hooks]
572 > [hooks]
573 > outgoing.makecommit = sh $TESTTMP/out_makecommit.sh
573 > outgoing.makecommit = sh $TESTTMP/out_makecommit.sh
574 > EOF
574 > EOF
575
575
576 (new config needs a server restart)
576 (new config needs a server restart)
577
577
578 $ cd ..
578 $ cd ..
579 $ killdaemons.py
579 $ killdaemons.py
580 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
580 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
581 $ cat pull-race.pid >> $DAEMON_PIDS
581 $ cat pull-race.pid >> $DAEMON_PIDS
582 $ cd pull-race2
582 $ cd pull-race2
583 $ hg -R $TESTTMP/pull-race book
583 $ hg -R $TESTTMP/pull-race book
584 @ 1:0d2164f0ce0d
584 @ 1:0d2164f0ce0d
585 X 1:0d2164f0ce0d
585 X 1:0d2164f0ce0d
586 * Y 4:b0a5eff05604
586 * Y 4:b0a5eff05604
587 Z 1:0d2164f0ce0d
587 Z 1:0d2164f0ce0d
588 $ hg pull
588 $ hg pull
589 pulling from http://localhost:$HGPORT/
589 pulling from http://localhost:$HGPORT/
590 searching for changes
590 searching for changes
591 adding changesets
591 adding changesets
592 adding manifests
592 adding manifests
593 adding file changes
593 adding file changes
594 updating bookmark Y
594 updating bookmark Y
595 added 1 changesets with 1 changes to 1 files
595 added 1 changesets with 1 changes to 1 files
596 new changesets b0a5eff05604 (1 drafts)
596 new changesets b0a5eff05604 (1 drafts)
597 (run 'hg update' to get a working copy)
597 (run 'hg update' to get a working copy)
598 $ hg book
598 $ hg book
599 * @ 1:0d2164f0ce0d
599 * @ 1:0d2164f0ce0d
600 X 1:0d2164f0ce0d
600 X 1:0d2164f0ce0d
601 Y 4:b0a5eff05604
601 Y 4:b0a5eff05604
602 Z 1:0d2164f0ce0d
602 Z 1:0d2164f0ce0d
603
603
604 Update a bookmark right after the initial lookup -B (issue4689)
604 Update a bookmark right after the initial lookup -B (issue4689)
605
605
606 $ echo c6 > ../pull-race/f3 # to be committed during the race
606 $ echo c6 > ../pull-race/f3 # to be committed during the race
607 $ cat <<EOF > $TESTTMP/listkeys_makecommit.sh
607 $ cat <<EOF > $TESTTMP/listkeys_makecommit.sh
608 > #!/bin/sh
608 > #!/bin/sh
609 > if hg st | grep -q M; then
609 > if hg st | grep -q M; then
610 > hg commit -m race
610 > hg commit -m race
611 > echo committed in pull-race
611 > echo committed in pull-race
612 > else
612 > else
613 > exit 0
613 > exit 0
614 > fi
614 > fi
615 > EOF
615 > EOF
616 $ cat <<EOF > ../pull-race/.hg/hgrc
616 $ cat <<EOF > ../pull-race/.hg/hgrc
617 > [hooks]
617 > [hooks]
618 > # If anything to commit, commit it right after the first key listing used
618 > # If anything to commit, commit it right after the first key listing used
619 > # during lookup. This makes the commit appear before the actual getbundle
619 > # during lookup. This makes the commit appear before the actual getbundle
620 > # call.
620 > # call.
621 > listkeys.makecommit= sh $TESTTMP/listkeys_makecommit.sh
621 > listkeys.makecommit= sh $TESTTMP/listkeys_makecommit.sh
622 > EOF
622 > EOF
623 $ restart_server() {
623 $ restart_server() {
624 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
624 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
625 > hg serve -R ../pull-race -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log
625 > hg serve -R ../pull-race -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log
626 > cat ../pull-race.pid >> $DAEMON_PIDS
626 > cat ../pull-race.pid >> $DAEMON_PIDS
627 > }
627 > }
628 $ restart_server # new config needs a server restart
628 $ restart_server # new config needs a server restart
629 $ hg -R $TESTTMP/pull-race book
629 $ hg -R $TESTTMP/pull-race book
630 @ 1:0d2164f0ce0d
630 @ 1:0d2164f0ce0d
631 X 1:0d2164f0ce0d
631 X 1:0d2164f0ce0d
632 * Y 5:35d1ef0a8d1b
632 * Y 5:35d1ef0a8d1b
633 Z 1:0d2164f0ce0d
633 Z 1:0d2164f0ce0d
634 $ hg update -r Y
634 $ hg update -r Y
635 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
635 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
636 (activating bookmark Y)
636 (activating bookmark Y)
637 $ hg pull -B .
637 $ hg pull -B .
638 pulling from http://localhost:$HGPORT/
638 pulling from http://localhost:$HGPORT/
639 searching for changes
639 searching for changes
640 adding changesets
640 adding changesets
641 adding manifests
641 adding manifests
642 adding file changes
642 adding file changes
643 updating bookmark Y
643 updating bookmark Y
644 added 1 changesets with 1 changes to 1 files
644 added 1 changesets with 1 changes to 1 files
645 new changesets 35d1ef0a8d1b (1 drafts)
645 new changesets 35d1ef0a8d1b (1 drafts)
646 (run 'hg update' to get a working copy)
646 (run 'hg update' to get a working copy)
647 $ hg book
647 $ hg book
648 @ 1:0d2164f0ce0d
648 @ 1:0d2164f0ce0d
649 X 1:0d2164f0ce0d
649 X 1:0d2164f0ce0d
650 * Y 5:35d1ef0a8d1b
650 * Y 5:35d1ef0a8d1b
651 Z 1:0d2164f0ce0d
651 Z 1:0d2164f0ce0d
652
652
653 Update a bookmark right after the initial lookup -r (issue4700)
653 Update a bookmark right after the initial lookup -r (issue4700)
654
654
655 $ echo c7 > ../pull-race/f3 # to be committed during the race
655 $ echo c7 > ../pull-race/f3 # to be committed during the race
656 $ cat <<EOF > ../lookuphook.py
656 $ cat <<EOF > ../lookuphook.py
657 > """small extension adding a hook after wireprotocol lookup to test races"""
657 > """small extension adding a hook after wireprotocol lookup to test races"""
658 > import functools
658 > import functools
659 > from mercurial import wireprotov1server, wireprotov2server
659 > from mercurial import wireprotov1server, wireprotov2server
660 >
660 >
661 > def wrappedlookup(orig, repo, *args, **kwargs):
661 > def wrappedlookup(orig, repo, *args, **kwargs):
662 > ret = orig(repo, *args, **kwargs)
662 > ret = orig(repo, *args, **kwargs)
663 > repo.hook(b'lookup')
663 > repo.hook(b'lookup')
664 > return ret
664 > return ret
665 > for table in [wireprotov1server.commands, wireprotov2server.COMMANDS]:
665 > for table in [wireprotov1server.commands, wireprotov2server.COMMANDS]:
666 > table[b'lookup'].func = functools.partial(wrappedlookup, table[b'lookup'].func)
666 > table[b'lookup'].func = functools.partial(wrappedlookup, table[b'lookup'].func)
667 > EOF
667 > EOF
668 $ cat <<EOF > ../pull-race/.hg/hgrc
668 $ cat <<EOF > ../pull-race/.hg/hgrc
669 > [extensions]
669 > [extensions]
670 > lookuphook=$TESTTMP/lookuphook.py
670 > lookuphook=$TESTTMP/lookuphook.py
671 > [hooks]
671 > [hooks]
672 > lookup.makecommit= sh $TESTTMP/listkeys_makecommit.sh
672 > lookup.makecommit= sh $TESTTMP/listkeys_makecommit.sh
673 > EOF
673 > EOF
674 $ restart_server # new config needs a server restart
674 $ restart_server # new config needs a server restart
675 $ hg -R $TESTTMP/pull-race book
675 $ hg -R $TESTTMP/pull-race book
676 @ 1:0d2164f0ce0d
676 @ 1:0d2164f0ce0d
677 X 1:0d2164f0ce0d
677 X 1:0d2164f0ce0d
678 * Y 6:0d60821d2197
678 * Y 6:0d60821d2197
679 Z 1:0d2164f0ce0d
679 Z 1:0d2164f0ce0d
680 $ hg pull -r Y
680 $ hg pull -r Y
681 pulling from http://localhost:$HGPORT/
681 pulling from http://localhost:$HGPORT/
682 searching for changes
682 searching for changes
683 adding changesets
683 adding changesets
684 adding manifests
684 adding manifests
685 adding file changes
685 adding file changes
686 updating bookmark Y
686 updating bookmark Y
687 added 1 changesets with 1 changes to 1 files
687 added 1 changesets with 1 changes to 1 files
688 new changesets 0d60821d2197 (1 drafts)
688 new changesets 0d60821d2197 (1 drafts)
689 (run 'hg update' to get a working copy)
689 (run 'hg update' to get a working copy)
690 $ hg book
690 $ hg book
691 @ 1:0d2164f0ce0d
691 @ 1:0d2164f0ce0d
692 X 1:0d2164f0ce0d
692 X 1:0d2164f0ce0d
693 * Y 6:0d60821d2197
693 * Y 6:0d60821d2197
694 Z 1:0d2164f0ce0d
694 Z 1:0d2164f0ce0d
695 $ hg -R $TESTTMP/pull-race book
695 $ hg -R $TESTTMP/pull-race book
696 @ 1:0d2164f0ce0d
696 @ 1:0d2164f0ce0d
697 X 1:0d2164f0ce0d
697 X 1:0d2164f0ce0d
698 * Y 7:714424d9e8b8
698 * Y 7:714424d9e8b8
699 Z 1:0d2164f0ce0d
699 Z 1:0d2164f0ce0d
700
700
701 (done with this section of the test)
701 (done with this section of the test)
702
702
703 $ killdaemons.py
703 $ killdaemons.py
704 $ cd ../b
704 $ cd ../b
705
705
706 diverging a remote bookmark fails
706 diverging a remote bookmark fails
707
707
708 $ hg up -q 4e3505fd9583
708 $ hg up -q 4e3505fd9583
709 $ echo c4 > f2
709 $ echo c4 > f2
710 $ hg ci -Am4
710 $ hg ci -Am4
711 adding f2
711 adding f2
712 created new head
712 created new head
713 $ echo c5 > f2
713 $ echo c5 > f2
714 $ hg ci -Am5
714 $ hg ci -Am5
715 $ hg log -G
715 $ hg log -G
716 @ 5:c922c0139ca0 5
716 @ 5:c922c0139ca0 5
717 |
717 |
718 o 4:4efff6d98829 4
718 o 4:4efff6d98829 4
719 |
719 |
720 | o 3:f6fc62dde3c0 3
720 | o 3:f6fc62dde3c0 3
721 |/
721 |/
722 | o 2:0d2164f0ce0d 1
722 | o 2:0d2164f0ce0d 1
723 |/
723 |/
724 | o 1:9b140be10808 2
724 | o 1:9b140be10808 2
725 |/
725 |/
726 o 0:4e3505fd9583 test
726 o 0:4e3505fd9583 test
727
727
728
728
729 $ hg book -f Y
729 $ hg book -f Y
730
730
731 $ cat <<EOF > ../a/.hg/hgrc
731 $ cat <<EOF > ../a/.hg/hgrc
732 > [web]
732 > [web]
733 > push_ssl = false
733 > push_ssl = false
734 > allow_push = *
734 > allow_push = *
735 > EOF
735 > EOF
736
736
737 $ hg serve -R ../a -p $HGPORT2 -d --pid-file=../hg2.pid
737 $ hg serve -R ../a -p $HGPORT2 -d --pid-file=../hg2.pid
738 $ cat ../hg2.pid >> $DAEMON_PIDS
738 $ cat ../hg2.pid >> $DAEMON_PIDS
739
739
740 $ hg push http://localhost:$HGPORT2/
740 $ hg push http://localhost:$HGPORT2/
741 pushing to http://localhost:$HGPORT2/
741 pushing to http://localhost:$HGPORT2/
742 searching for changes
742 searching for changes
743 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'
743 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'
744 (merge or see 'hg help push' for details about pushing new heads)
744 (merge or see 'hg help push' for details about pushing new heads)
745 [20]
745 [20]
746 $ hg -R ../a book
746 $ hg -R ../a book
747 @ 1:0d2164f0ce0d
747 @ 1:0d2164f0ce0d
748 * X 1:0d2164f0ce0d
748 * X 1:0d2164f0ce0d
749 Y 3:f6fc62dde3c0
749 Y 3:f6fc62dde3c0
750 Z 1:0d2164f0ce0d
750 Z 1:0d2164f0ce0d
751
751
752
752
753 Unrelated marker does not alter the decision
753 Unrelated marker does not alter the decision
754
754
755 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
755 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
756 1 new obsolescence markers
756 1 new obsolescence markers
757 $ hg push http://localhost:$HGPORT2/
757 $ hg push http://localhost:$HGPORT2/
758 pushing to http://localhost:$HGPORT2/
758 pushing to http://localhost:$HGPORT2/
759 searching for changes
759 searching for changes
760 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'
760 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'
761 (merge or see 'hg help push' for details about pushing new heads)
761 (merge or see 'hg help push' for details about pushing new heads)
762 [20]
762 [20]
763 $ hg -R ../a book
763 $ hg -R ../a book
764 @ 1:0d2164f0ce0d
764 @ 1:0d2164f0ce0d
765 * X 1:0d2164f0ce0d
765 * X 1:0d2164f0ce0d
766 Y 3:f6fc62dde3c0
766 Y 3:f6fc62dde3c0
767 Z 1:0d2164f0ce0d
767 Z 1:0d2164f0ce0d
768
768
769 Update to a successor works
769 Update to a successor works
770
770
771 $ hg id --debug -r 3
771 $ hg id --debug -r 3
772 f6fc62dde3c0771e29704af56ba4d8af77abcc2f
772 f6fc62dde3c0771e29704af56ba4d8af77abcc2f
773 $ hg id --debug -r 4
773 $ hg id --debug -r 4
774 4efff6d98829d9c824c621afd6e3f01865f5439f
774 4efff6d98829d9c824c621afd6e3f01865f5439f
775 $ hg id --debug -r 5
775 $ hg id --debug -r 5
776 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
776 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
777 $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
777 $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
778 1 new obsolescence markers
778 1 new obsolescence markers
779 obsoleted 1 changesets
779 obsoleted 1 changesets
780 $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
780 $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
781 1 new obsolescence markers
781 1 new obsolescence markers
782 $ hg push http://localhost:$HGPORT2/
782 $ hg push http://localhost:$HGPORT2/
783 pushing to http://localhost:$HGPORT2/
783 pushing to http://localhost:$HGPORT2/
784 searching for changes
784 searching for changes
785 remote: adding changesets
785 remote: adding changesets
786 remote: adding manifests
786 remote: adding manifests
787 remote: adding file changes
787 remote: adding file changes
788 remote: added 2 changesets with 2 changes to 1 files (+1 heads)
788 remote: added 2 changesets with 2 changes to 1 files (+1 heads)
789 remote: 2 new obsolescence markers
789 remote: 2 new obsolescence markers
790 remote: obsoleted 1 changesets
790 remote: obsoleted 1 changesets
791 updating bookmark Y
791 updating bookmark Y
792 $ hg -R ../a book
792 $ hg -R ../a book
793 @ 1:0d2164f0ce0d
793 @ 1:0d2164f0ce0d
794 * X 1:0d2164f0ce0d
794 * X 1:0d2164f0ce0d
795 Y 5:c922c0139ca0
795 Y 5:c922c0139ca0
796 Z 1:0d2164f0ce0d
796 Z 1:0d2164f0ce0d
797
797
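(The push above is accepted because the two markers chain the old bookmarked head f6fc62dde3c0 through cccccccccccc to 4efff6d98829, which is an ancestor of the new head c922c0139ca0. The server therefore treats c922c0139ca0 as a successor of the head it replaces rather than as an extra head, so no --force is needed and bookmark Y is allowed to move to it.)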
798 hgweb
798 hgweb
799
799
800 $ cat <<EOF > .hg/hgrc
800 $ cat <<EOF > .hg/hgrc
801 > [web]
801 > [web]
802 > push_ssl = false
802 > push_ssl = false
803 > allow_push = *
803 > allow_push = *
804 > EOF
804 > EOF
805
805
806 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
806 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
807 $ cat ../hg.pid >> $DAEMON_PIDS
807 $ cat ../hg.pid >> $DAEMON_PIDS
808 $ cd ../a
808 $ cd ../a
809
809
810 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
810 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
811 bookmarks
811 bookmarks
812 namespaces
812 namespaces
813 obsolete
813 obsolete
814 phases
814 phases
815 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
815 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
816 @ 9b140be1080824d768c5a4691a564088eede71f9
816 @ 9b140be1080824d768c5a4691a564088eede71f9
817 X 9b140be1080824d768c5a4691a564088eede71f9
817 X 9b140be1080824d768c5a4691a564088eede71f9
818 Y c922c0139ca03858f655e4a2af4dd02796a63969
818 Y c922c0139ca03858f655e4a2af4dd02796a63969
819 Z 9b140be1080824d768c5a4691a564088eede71f9
819 Z 9b140be1080824d768c5a4691a564088eede71f9
820 foo 0000000000000000000000000000000000000000
820 foo 0000000000000000000000000000000000000000
821 foobar 9b140be1080824d768c5a4691a564088eede71f9
821 foobar 9b140be1080824d768c5a4691a564088eede71f9
822 $ hg out -B http://localhost:$HGPORT/
822 $ hg out -B http://localhost:$HGPORT/
823 comparing with http://localhost:$HGPORT/
823 comparing with http://localhost:$HGPORT/
824 searching for changed bookmarks
824 searching for changed bookmarks
825 @ 0d2164f0ce0d
825 @ 0d2164f0ce0d
826 X 0d2164f0ce0d
826 X 0d2164f0ce0d
827 Z 0d2164f0ce0d
827 Z 0d2164f0ce0d
828 foo
828 foo
829 foobar
829 foobar
830 $ hg push -B Z http://localhost:$HGPORT/
830 $ hg push -B Z http://localhost:$HGPORT/
831 pushing to http://localhost:$HGPORT/
831 pushing to http://localhost:$HGPORT/
832 searching for changes
832 searching for changes
833 no changes found
833 no changes found
834 updating bookmark Z
834 updating bookmark Z
835 [1]
835 [1]
836 $ hg book -d Z
836 $ hg book -d Z
837 $ hg in -B http://localhost:$HGPORT/
837 $ hg in -B http://localhost:$HGPORT/
838 comparing with http://localhost:$HGPORT/
838 comparing with http://localhost:$HGPORT/
839 searching for changed bookmarks
839 searching for changed bookmarks
840 @ 9b140be10808
840 @ 9b140be10808
841 X 9b140be10808
841 X 9b140be10808
842 Z 0d2164f0ce0d
842 Z 0d2164f0ce0d
843 foo 000000000000
843 foo 000000000000
844 foobar 9b140be10808
844 foobar 9b140be10808
845 $ hg pull -B Z http://localhost:$HGPORT/
845 $ hg pull -B Z http://localhost:$HGPORT/
846 pulling from http://localhost:$HGPORT/
846 pulling from http://localhost:$HGPORT/
847 no changes found
847 no changes found
848 divergent bookmark @ stored as @1
848 divergent bookmark @ stored as @1
849 divergent bookmark X stored as X@1
849 divergent bookmark X stored as X@1
850 adding remote bookmark Z
850 adding remote bookmark Z
851 adding remote bookmark foo
851 adding remote bookmark foo
852 adding remote bookmark foobar
852 adding remote bookmark foobar
853 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
853 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
854 requesting all changes
854 requesting all changes
855 adding changesets
855 adding changesets
856 adding manifests
856 adding manifests
857 adding file changes
857 adding file changes
858 added 5 changesets with 5 changes to 3 files (+2 heads)
858 added 5 changesets with 5 changes to 3 files (+2 heads)
859 2 new obsolescence markers
859 2 new obsolescence markers
860 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
860 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
861 updating to bookmark @
861 updating to bookmark @
862 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
862 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
863 $ hg -R cloned-bookmarks bookmarks
863 $ hg -R cloned-bookmarks bookmarks
864 * @ 1:9b140be10808
864 * @ 1:9b140be10808
865 X 1:9b140be10808
865 X 1:9b140be10808
866 Y 4:c922c0139ca0
866 Y 4:c922c0139ca0
867 Z 2:0d2164f0ce0d
867 Z 2:0d2164f0ce0d
868 foo -1:000000000000
868 foo -1:000000000000
869 foobar 1:9b140be10808
869 foobar 1:9b140be10808
870
870
871 $ cd ..
871 $ cd ..
872
872
873 Test to show result of bookmarks comparison
873 Test to show result of bookmarks comparison
874
874
875 $ mkdir bmcomparison
875 $ mkdir bmcomparison
876 $ cd bmcomparison
876 $ cd bmcomparison
877
877
878 $ hg init source
878 $ hg init source
879 $ hg -R source debugbuilddag '+2*2*3*4'
879 $ hg -R source debugbuilddag '+2*2*3*4'
880 $ hg -R source log -G --template '{rev}:{node|short}'
880 $ hg -R source log -G --template '{rev}:{node|short}'
881 o 4:e7bd5218ca15
881 o 4:e7bd5218ca15
882 |
882 |
883 | o 3:6100d3090acf
883 | o 3:6100d3090acf
884 |/
884 |/
885 | o 2:fa942426a6fd
885 | o 2:fa942426a6fd
886 |/
886 |/
887 | o 1:66f7d451a68b
887 | o 1:66f7d451a68b
888 |/
888 |/
889 o 0:1ea73414a91b
889 o 0:1ea73414a91b
890
890
891 $ hg -R source bookmarks -r 0 SAME
891 $ hg -R source bookmarks -r 0 SAME
892 $ hg -R source bookmarks -r 0 ADV_ON_REPO1
892 $ hg -R source bookmarks -r 0 ADV_ON_REPO1
893 $ hg -R source bookmarks -r 0 ADV_ON_REPO2
893 $ hg -R source bookmarks -r 0 ADV_ON_REPO2
894 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO1
894 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO1
895 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO2
895 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO2
896 $ hg -R source bookmarks -r 1 DIVERGED
896 $ hg -R source bookmarks -r 1 DIVERGED
897
897
898 $ hg clone -U source repo1
898 $ hg clone -U source repo1
899
899
900 (test that incoming/outgoing exit with 1 if there is no bookmark to
900 (test that incoming/outgoing exit with 1 if there is no bookmark to
901 be exchanged)
901 be exchanged)
902
902
903 $ hg -R repo1 incoming -B
903 $ hg -R repo1 incoming -B
904 comparing with $TESTTMP/bmcomparison/source
904 comparing with $TESTTMP/bmcomparison/source
905 searching for changed bookmarks
905 searching for changed bookmarks
906 no changed bookmarks found
906 no changed bookmarks found
907 [1]
907 [1]
908 $ hg -R repo1 outgoing -B
908 $ hg -R repo1 outgoing -B
909 comparing with $TESTTMP/bmcomparison/source
909 comparing with $TESTTMP/bmcomparison/source
910 searching for changed bookmarks
910 searching for changed bookmarks
911 no changed bookmarks found
911 no changed bookmarks found
912 [1]
912 [1]
913
913
914 $ hg -R repo1 bookmarks -f -r 1 ADD_ON_REPO1
914 $ hg -R repo1 bookmarks -f -r 1 ADD_ON_REPO1
915 $ hg -R repo1 bookmarks -f -r 2 ADV_ON_REPO1
915 $ hg -R repo1 bookmarks -f -r 2 ADV_ON_REPO1
916 $ hg -R repo1 bookmarks -f -r 3 DIFF_ADV_ON_REPO1
916 $ hg -R repo1 bookmarks -f -r 3 DIFF_ADV_ON_REPO1
917 $ hg -R repo1 bookmarks -f -r 3 DIFF_DIVERGED
917 $ hg -R repo1 bookmarks -f -r 3 DIFF_DIVERGED
918 $ hg -R repo1 -q --config extensions.mq= strip 4
918 $ hg -R repo1 -q --config extensions.mq= strip 4
919 $ hg -R repo1 log -G --template '{node|short} ({bookmarks})'
919 $ hg -R repo1 log -G --template '{node|short} ({bookmarks})'
920 o 6100d3090acf (DIFF_ADV_ON_REPO1 DIFF_DIVERGED)
920 o 6100d3090acf (DIFF_ADV_ON_REPO1 DIFF_DIVERGED)
921 |
921 |
922 | o fa942426a6fd (ADV_ON_REPO1)
922 | o fa942426a6fd (ADV_ON_REPO1)
923 |/
923 |/
924 | o 66f7d451a68b (ADD_ON_REPO1 DIVERGED)
924 | o 66f7d451a68b (ADD_ON_REPO1 DIVERGED)
925 |/
925 |/
926 o 1ea73414a91b (ADV_ON_REPO2 DIFF_ADV_ON_REPO2 SAME)
926 o 1ea73414a91b (ADV_ON_REPO2 DIFF_ADV_ON_REPO2 SAME)
927
927
928
928
929 $ hg clone -U source repo2
929 $ hg clone -U source repo2
930 $ hg -R repo2 bookmarks -f -r 1 ADD_ON_REPO2
930 $ hg -R repo2 bookmarks -f -r 1 ADD_ON_REPO2
931 $ hg -R repo2 bookmarks -f -r 1 ADV_ON_REPO2
931 $ hg -R repo2 bookmarks -f -r 1 ADV_ON_REPO2
932 $ hg -R repo2 bookmarks -f -r 2 DIVERGED
932 $ hg -R repo2 bookmarks -f -r 2 DIVERGED
933 $ hg -R repo2 bookmarks -f -r 4 DIFF_ADV_ON_REPO2
933 $ hg -R repo2 bookmarks -f -r 4 DIFF_ADV_ON_REPO2
934 $ hg -R repo2 bookmarks -f -r 4 DIFF_DIVERGED
934 $ hg -R repo2 bookmarks -f -r 4 DIFF_DIVERGED
935 $ hg -R repo2 -q --config extensions.mq= strip 3
935 $ hg -R repo2 -q --config extensions.mq= strip 3
936 $ hg -R repo2 log -G --template '{node|short} ({bookmarks})'
936 $ hg -R repo2 log -G --template '{node|short} ({bookmarks})'
937 o e7bd5218ca15 (DIFF_ADV_ON_REPO2 DIFF_DIVERGED)
937 o e7bd5218ca15 (DIFF_ADV_ON_REPO2 DIFF_DIVERGED)
938 |
938 |
939 | o fa942426a6fd (DIVERGED)
939 | o fa942426a6fd (DIVERGED)
940 |/
940 |/
941 | o 66f7d451a68b (ADD_ON_REPO2 ADV_ON_REPO2)
941 | o 66f7d451a68b (ADD_ON_REPO2 ADV_ON_REPO2)
942 |/
942 |/
943 o 1ea73414a91b (ADV_ON_REPO1 DIFF_ADV_ON_REPO1 SAME)
943 o 1ea73414a91b (ADV_ON_REPO1 DIFF_ADV_ON_REPO1 SAME)
944
944
945
945
946 (test that differences of bookmarks between repositories are fully shown)
946 (test that differences of bookmarks between repositories are fully shown)
947
947
948 $ hg -R repo1 incoming -B repo2 -v
948 $ hg -R repo1 incoming -B repo2 -v
949 comparing with repo2
949 comparing with repo2
950 searching for changed bookmarks
950 searching for changed bookmarks
951 ADD_ON_REPO2 66f7d451a68b added
951 ADD_ON_REPO2 66f7d451a68b added
952 ADV_ON_REPO2 66f7d451a68b advanced
952 ADV_ON_REPO2 66f7d451a68b advanced
953 DIFF_ADV_ON_REPO2 e7bd5218ca15 changed
953 DIFF_ADV_ON_REPO2 e7bd5218ca15 changed
954 DIFF_DIVERGED e7bd5218ca15 changed
954 DIFF_DIVERGED e7bd5218ca15 changed
955 DIVERGED fa942426a6fd diverged
955 DIVERGED fa942426a6fd diverged
956 $ hg -R repo1 outgoing -B repo2 -v
956 $ hg -R repo1 outgoing -B repo2 -v
957 comparing with repo2
957 comparing with repo2
958 searching for changed bookmarks
958 searching for changed bookmarks
959 ADD_ON_REPO1 66f7d451a68b added
959 ADD_ON_REPO1 66f7d451a68b added
960 ADD_ON_REPO2 deleted
960 ADD_ON_REPO2 deleted
961 ADV_ON_REPO1 fa942426a6fd advanced
961 ADV_ON_REPO1 fa942426a6fd advanced
962 DIFF_ADV_ON_REPO1 6100d3090acf advanced
962 DIFF_ADV_ON_REPO1 6100d3090acf advanced
963 DIFF_ADV_ON_REPO2 1ea73414a91b changed
963 DIFF_ADV_ON_REPO2 1ea73414a91b changed
964 DIFF_DIVERGED 6100d3090acf changed
964 DIFF_DIVERGED 6100d3090acf changed
965 DIVERGED 66f7d451a68b diverged
965 DIVERGED 66f7d451a68b diverged
966
966
967 $ hg -R repo2 incoming -B repo1 -v
967 $ hg -R repo2 incoming -B repo1 -v
968 comparing with repo1
968 comparing with repo1
969 searching for changed bookmarks
969 searching for changed bookmarks
970 ADD_ON_REPO1 66f7d451a68b added
970 ADD_ON_REPO1 66f7d451a68b added
971 ADV_ON_REPO1 fa942426a6fd advanced
971 ADV_ON_REPO1 fa942426a6fd advanced
972 DIFF_ADV_ON_REPO1 6100d3090acf changed
972 DIFF_ADV_ON_REPO1 6100d3090acf changed
973 DIFF_DIVERGED 6100d3090acf changed
973 DIFF_DIVERGED 6100d3090acf changed
974 DIVERGED 66f7d451a68b diverged
974 DIVERGED 66f7d451a68b diverged
975 $ hg -R repo2 outgoing -B repo1 -v
975 $ hg -R repo2 outgoing -B repo1 -v
976 comparing with repo1
976 comparing with repo1
977 searching for changed bookmarks
977 searching for changed bookmarks
978 ADD_ON_REPO1 deleted
978 ADD_ON_REPO1 deleted
979 ADD_ON_REPO2 66f7d451a68b added
979 ADD_ON_REPO2 66f7d451a68b added
980 ADV_ON_REPO2 66f7d451a68b advanced
980 ADV_ON_REPO2 66f7d451a68b advanced
981 DIFF_ADV_ON_REPO1 1ea73414a91b changed
981 DIFF_ADV_ON_REPO1 1ea73414a91b changed
982 DIFF_ADV_ON_REPO2 e7bd5218ca15 advanced
982 DIFF_ADV_ON_REPO2 e7bd5218ca15 advanced
983 DIFF_DIVERGED e7bd5218ca15 changed
983 DIFF_DIVERGED e7bd5218ca15 changed
984 DIVERGED fa942426a6fd diverged
984 DIVERGED fa942426a6fd diverged
985
985
986 $ cd ..
986 $ cd ..
987
987
988 Pushing a bookmark should only push the changes required by that
988 Pushing a bookmark should only push the changes required by that
989 bookmark, not all outgoing changes:
989 bookmark, not all outgoing changes:
990 $ hg clone http://localhost:$HGPORT/ addmarks
990 $ hg clone http://localhost:$HGPORT/ addmarks
991 requesting all changes
991 requesting all changes
992 adding changesets
992 adding changesets
993 adding manifests
993 adding manifests
994 adding file changes
994 adding file changes
995 added 5 changesets with 5 changes to 3 files (+2 heads)
995 added 5 changesets with 5 changes to 3 files (+2 heads)
996 2 new obsolescence markers
996 2 new obsolescence markers
997 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
997 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
998 updating to bookmark @
998 updating to bookmark @
999 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
999 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1000 $ cd addmarks
1000 $ cd addmarks
1001 $ echo foo > foo
1001 $ echo foo > foo
1002 $ hg add foo
1002 $ hg add foo
1003 $ hg commit -m 'add foo'
1003 $ hg commit -m 'add foo'
1004 $ echo bar > bar
1004 $ echo bar > bar
1005 $ hg add bar
1005 $ hg add bar
1006 $ hg commit -m 'add bar'
1006 $ hg commit -m 'add bar'
1007 $ hg co "tip^"
1007 $ hg co "tip^"
1008 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1008 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1009 (leaving bookmark @)
1009 (leaving bookmark @)
1010 $ hg book add-foo
1010 $ hg book add-foo
1011 $ hg book -r tip add-bar
1011 $ hg book -r tip add-bar
1012 Note: this push *must* push only a single changeset, as that's the point
1012 Note: this push *must* push only a single changeset, as that's the point
1013 of this test.
1013 of this test.
1014 $ hg push -B add-foo --traceback
1014 $ hg push -B add-foo --traceback
1015 pushing to http://localhost:$HGPORT/
1015 pushing to http://localhost:$HGPORT/
1016 searching for changes
1016 searching for changes
1017 remote: adding changesets
1017 remote: adding changesets
1018 remote: adding manifests
1018 remote: adding manifests
1019 remote: adding file changes
1019 remote: adding file changes
1020 remote: added 1 changesets with 1 changes to 1 files
1020 remote: added 1 changesets with 1 changes to 1 files
1021 exporting bookmark add-foo
1021 exporting bookmark add-foo
1022
1022
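The same selection rule can be expressed with the revset language: `push -B BOOK` sends the bookmark plus the missing ancestors of its target. A minimal sketch, assuming an `hg` executable on PATH and a configured default push path (the helper name is illustrative, not part of this test suite):

    import subprocess

    def revs_for_bookmark_push(book, repo='.'):
        # Roughly what `hg push -B BOOK` selects: ancestors of the bookmark's
        # target, restricted to changesets the default remote is missing.
        revset = "ancestors(bookmark('%s')) and outgoing()" % book
        result = subprocess.run(
            ['hg', '-R', repo, 'log', '-r', revset, '-T', '{node|short}\n'],
            capture_output=True, text=True, check=True)
        return result.stdout.split()

    # In the scenario above this would report a single changeset for 'add-foo'.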
1023 pushing a new bookmark on a new head does not require -f if -B is specified
1023 pushing a new bookmark on a new head does not require -f if -B is specified
1024
1024
1025 $ hg up -q X
1025 $ hg up -q X
1026 $ hg book W
1026 $ hg book W
1027 $ echo c5 > f2
1027 $ echo c5 > f2
1028 $ hg ci -Am5
1028 $ hg ci -Am5
1029 created new head
1029 created new head
1030 $ hg push -B .
1030 $ hg push -B .
1031 pushing to http://localhost:$HGPORT/
1031 pushing to http://localhost:$HGPORT/
1032 searching for changes
1032 searching for changes
1033 remote: adding changesets
1033 remote: adding changesets
1034 remote: adding manifests
1034 remote: adding manifests
1035 remote: adding file changes
1035 remote: adding file changes
1036 remote: added 1 changesets with 1 changes to 1 files (+1 heads)
1036 remote: added 1 changesets with 1 changes to 1 files (+1 heads)
1037 exporting bookmark W
1037 exporting bookmark W
1038 $ hg -R ../b id -r W
1038 $ hg -R ../b id -r W
1039 cc978a373a53 tip W
1039 cc978a373a53 tip W
1040
1040
1041 pushing an existing but divergent bookmark with -B still requires -f
1041 pushing an existing but divergent bookmark with -B still requires -f
1042
1042
1043 $ hg clone -q . ../r
1043 $ hg clone -q . ../r
1044 $ hg up -q X
1044 $ hg up -q X
1045 $ echo 1 > f2
1045 $ echo 1 > f2
1046 $ hg ci -qAml
1046 $ hg ci -qAml
1047
1047
1048 $ cd ../r
1048 $ cd ../r
1049 $ hg up -q X
1049 $ hg up -q X
1050 $ echo 2 > f2
1050 $ echo 2 > f2
1051 $ hg ci -qAmr
1051 $ hg ci -qAmr
1052 $ hg push -B X
1052 $ hg push -B X
1053 pushing to $TESTTMP/addmarks
1053 pushing to $TESTTMP/addmarks
1054 searching for changes
1054 searching for changes
1055 remote has heads on branch 'default' that are not known locally: a2a606d9ff1b
1055 remote has heads on branch 'default' that are not known locally: a2a606d9ff1b
1056 abort: push creates new remote head 54694f811df9 with bookmark 'X'
1056 abort: push creates new remote head 54694f811df9 with bookmark 'X'
1057 (pull and merge or see 'hg help push' for details about pushing new heads)
1057 (pull and merge or see 'hg help push' for details about pushing new heads)
1058 [20]
1058 [20]
1059 $ cd ../addmarks
1059 $ cd ../addmarks
1060
1060
1061 Check summary output for incoming/outgoing bookmarks
1061 Check summary output for incoming/outgoing bookmarks
1062
1062
1063 $ hg bookmarks -d X
1063 $ hg bookmarks -d X
1064 $ hg bookmarks -d Y
1064 $ hg bookmarks -d Y
1065 $ hg summary --remote | grep '^remote:'
1065 $ hg summary --remote | grep '^remote:'
1066 remote: *, 2 incoming bookmarks, 1 outgoing bookmarks (glob)
1066 remote: *, 2 incoming bookmarks, 1 outgoing bookmarks (glob)
1067
1067
1068 $ cd ..
1068 $ cd ..
1069
1069
1070 pushing an unchanged bookmark should result in no changes
1070 pushing an unchanged bookmark should result in no changes
1071
1071
1072 $ hg init unchanged-a
1072 $ hg init unchanged-a
1073 $ hg init unchanged-b
1073 $ hg init unchanged-b
1074 $ cd unchanged-a
1074 $ cd unchanged-a
1075 $ echo initial > foo
1075 $ echo initial > foo
1076 $ hg commit -A -m initial
1076 $ hg commit -A -m initial
1077 adding foo
1077 adding foo
1078 $ hg bookmark @
1078 $ hg bookmark @
1079 $ hg push -B @ ../unchanged-b
1079 $ hg push -B @ ../unchanged-b
1080 pushing to ../unchanged-b
1080 pushing to ../unchanged-b
1081 searching for changes
1081 searching for changes
1082 adding changesets
1082 adding changesets
1083 adding manifests
1083 adding manifests
1084 adding file changes
1084 adding file changes
1085 added 1 changesets with 1 changes to 1 files
1085 added 1 changesets with 1 changes to 1 files
1086 exporting bookmark @
1086 exporting bookmark @
1087
1087
1088 $ hg push -B @ ../unchanged-b
1088 $ hg push -B @ ../unchanged-b
1089 pushing to ../unchanged-b
1089 pushing to ../unchanged-b
1090 searching for changes
1090 searching for changes
1091 no changes found
1091 no changes found
1092 [1]
1092 [1]
1093
1093
1094 Pushing a really long bookmark should work fine (issue5165)
1094 Pushing a really long bookmark should work fine (issue5165)
1095 ===========================================================
1095 ===========================================================
1096
1096
1097 #if b2-binary
1097 #if b2-binary
1098 >>> with open('longname', 'w') as f:
1098 >>> with open('longname', 'w') as f:
1099 ... f.write('wat' * 100) and None
1099 ... f.write('wat' * 100) and None
1100 $ hg book `cat longname`
1100 $ hg book `cat longname`
1101 $ hg push -B `cat longname` ../unchanged-b
1101 $ hg push -B `cat longname` ../unchanged-b
1102 pushing to ../unchanged-b
1102 pushing to ../unchanged-b
1103 searching for changes
1103 searching for changes
1104 no changes found
1104 no changes found
1105 exporting bookmark (wat){100} (re)
1105 exporting bookmark (wat){100} (re)
1106 [1]
1106 [1]
1107 $ hg -R ../unchanged-b book --delete `cat longname`
1107 $ hg -R ../unchanged-b book --delete `cat longname`
1108
1108
1109 Test again but forcing bundle2 exchange to make sure that doesn't regress.
1109 Test again but forcing bundle2 exchange to make sure that doesn't regress.
1110
1110
1111 $ hg push -B `cat longname` ../unchanged-b --config devel.legacy.exchange=bundle1
1111 $ hg push -B `cat longname` ../unchanged-b --config devel.legacy.exchange=bundle1
1112 pushing to ../unchanged-b
1112 pushing to ../unchanged-b
1113 searching for changes
1113 searching for changes
1114 no changes found
1114 no changes found
1115 exporting bookmark (wat){100} (re)
1115 exporting bookmark (wat){100} (re)
1116 [1]
1116 [1]
1117 $ hg -R ../unchanged-b book --delete `cat longname`
1117 $ hg -R ../unchanged-b book --delete `cat longname`
1118 $ hg book --delete `cat longname`
1118 $ hg book --delete `cat longname`
1119 $ hg co @
1119 $ hg co @
1120 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1120 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1121 (activating bookmark @)
1121 (activating bookmark @)
1122 #endif
1122 #endif
1123
1123
1124 Check hook preventing push (issue4455)
1124 Check hook preventing push (issue4455)
1125 ======================================
1125 ======================================
1126
1126
1127 $ hg bookmarks
1127 $ hg bookmarks
1128 * @ 0:55482a6fb4b1
1128 * @ 0:55482a6fb4b1
1129 $ hg log -G
1129 $ hg log -G
1130 @ 0:55482a6fb4b1 initial
1130 @ 0:55482a6fb4b1 initial
1131
1131
1132 $ hg init ../issue4455-dest
1132 $ hg init ../issue4455-dest
1133 $ hg push ../issue4455-dest # changesets only
1133 $ hg push ../issue4455-dest # changesets only
1134 pushing to ../issue4455-dest
1134 pushing to ../issue4455-dest
1135 searching for changes
1135 searching for changes
1136 adding changesets
1136 adding changesets
1137 adding manifests
1137 adding manifests
1138 adding file changes
1138 adding file changes
1139 added 1 changesets with 1 changes to 1 files
1139 added 1 changesets with 1 changes to 1 files
1140 $ cat >> .hg/hgrc << EOF
1140 $ cat >> .hg/hgrc << EOF
1141 > [paths]
1141 > [paths]
1142 > local=../issue4455-dest/
1142 > local=../issue4455-dest/
1143 > ssh=ssh://user@dummy/issue4455-dest
1143 > ssh=ssh://user@dummy/issue4455-dest
1144 > http=http://localhost:$HGPORT/
1144 > http=http://localhost:$HGPORT/
1145 > [ui]
1145 > [ui]
1146 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1146 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1147 > EOF
1147 > EOF
1148 $ cat >> ../issue4455-dest/.hg/hgrc << EOF
1148 $ cat >> ../issue4455-dest/.hg/hgrc << EOF
1149 > [hooks]
1149 > [hooks]
1150 > prepushkey=false
1150 > prepushkey=false
1151 > [web]
1151 > [web]
1152 > push_ssl = false
1152 > push_ssl = false
1153 > allow_push = *
1153 > allow_push = *
1154 > EOF
1154 > EOF
1155 $ killdaemons.py
1155 $ killdaemons.py
1156 $ hg serve -R ../issue4455-dest -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log
1156 $ hg serve -R ../issue4455-dest -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log
1157 $ cat ../issue4455.pid >> $DAEMON_PIDS
1157 $ cat ../issue4455.pid >> $DAEMON_PIDS
1158
1158
1159 Local push
1159 Local push
1160 ----------
1160 ----------
1161
1161
1162 #if b2-pushkey
1162 #if b2-pushkey
1163
1163
1164 $ hg push -B @ local
1164 $ hg push -B @ local
1165 pushing to $TESTTMP/issue4455-dest
1165 pushing to $TESTTMP/issue4455-dest
1166 searching for changes
1166 searching for changes
1167 no changes found
1167 no changes found
1168 pushkey-abort: prepushkey hook exited with status 1
1168 pushkey-abort: prepushkey hook exited with status 1
1169 abort: exporting bookmark @ failed
1169 abort: exporting bookmark @ failed
1170 [255]
1170 [255]
1171
1171
1172 #endif
1172 #endif
1173 #if b2-binary
1173 #if b2-binary
1174
1174
1175 $ hg push -B @ local
1175 $ hg push -B @ local
1176 pushing to $TESTTMP/issue4455-dest
1176 pushing to $TESTTMP/issue4455-dest
1177 searching for changes
1177 searching for changes
1178 no changes found
1178 no changes found
1179 abort: prepushkey hook exited with status 1
1179 abort: prepushkey hook exited with status 1
1180 [40]
1180 [40]
1181
1181
1182 #endif
1182 #endif
1183
1183
1184 $ hg -R ../issue4455-dest/ bookmarks
1184 $ hg -R ../issue4455-dest/ bookmarks
1185 no bookmarks set
1185 no bookmarks set
1186
1186
1187 Using ssh
1187 Using ssh
1188 ---------
1188 ---------
1189
1189
1190 #if b2-pushkey
1190 #if b2-pushkey
1191
1191
1192 $ hg push -B @ ssh # bundle2+
1192 $ hg push -B @ ssh # bundle2+
1193 pushing to ssh://user@dummy/issue4455-dest
1193 pushing to ssh://user@dummy/issue4455-dest
1194 searching for changes
1194 searching for changes
1195 no changes found
1195 no changes found
1196 remote: pushkey-abort: prepushkey hook exited with status 1
1196 remote: pushkey-abort: prepushkey hook exited with status 1
1197 abort: exporting bookmark @ failed
1197 abort: exporting bookmark @ failed
1198 [255]
1198 [255]
1199
1199
1200 $ hg -R ../issue4455-dest/ bookmarks
1200 $ hg -R ../issue4455-dest/ bookmarks
1201 no bookmarks set
1201 no bookmarks set
1202
1202
1203 $ hg push -B @ ssh --config devel.legacy.exchange=bundle1
1203 $ hg push -B @ ssh --config devel.legacy.exchange=bundle1
1204 pushing to ssh://user@dummy/issue4455-dest
1204 pushing to ssh://user@dummy/issue4455-dest
1205 searching for changes
1205 searching for changes
1206 no changes found
1206 no changes found
1207 remote: pushkey-abort: prepushkey hook exited with status 1
1207 remote: pushkey-abort: prepushkey hook exited with status 1
1208 exporting bookmark @ failed
1208 exporting bookmark @ failed
1209 [1]
1209 [1]
1210
1210
1211 #endif
1211 #endif
1212 #if b2-binary
1212 #if b2-binary
1213
1213
1214 $ hg push -B @ ssh # bundle2+
1214 $ hg push -B @ ssh # bundle2+
1215 pushing to ssh://user@dummy/issue4455-dest
1215 pushing to ssh://user@dummy/issue4455-dest
1216 searching for changes
1216 searching for changes
1217 no changes found
1217 no changes found
1218 remote: prepushkey hook exited with status 1
1218 remote: prepushkey hook exited with status 1
1219 abort: push failed on remote
1219 abort: push failed on remote
1220 [255]
1220 [100]
1221
1221
1222 #endif
1222 #endif
1223
1223
1224 $ hg -R ../issue4455-dest/ bookmarks
1224 $ hg -R ../issue4455-dest/ bookmarks
1225 no bookmarks set
1225 no bookmarks set
1226
1226
1227 Using http
1227 Using http
1228 ----------
1228 ----------
1229
1229
1230 #if b2-pushkey
1230 #if b2-pushkey
1231 $ hg push -B @ http # bundle2+
1231 $ hg push -B @ http # bundle2+
1232 pushing to http://localhost:$HGPORT/
1232 pushing to http://localhost:$HGPORT/
1233 searching for changes
1233 searching for changes
1234 no changes found
1234 no changes found
1235 remote: pushkey-abort: prepushkey hook exited with status 1
1235 remote: pushkey-abort: prepushkey hook exited with status 1
1236 abort: exporting bookmark @ failed
1236 abort: exporting bookmark @ failed
1237 [255]
1237 [255]
1238
1238
1239 $ hg -R ../issue4455-dest/ bookmarks
1239 $ hg -R ../issue4455-dest/ bookmarks
1240 no bookmarks set
1240 no bookmarks set
1241
1241
1242 $ hg push -B @ http --config devel.legacy.exchange=bundle1
1242 $ hg push -B @ http --config devel.legacy.exchange=bundle1
1243 pushing to http://localhost:$HGPORT/
1243 pushing to http://localhost:$HGPORT/
1244 searching for changes
1244 searching for changes
1245 no changes found
1245 no changes found
1246 remote: pushkey-abort: prepushkey hook exited with status 1
1246 remote: pushkey-abort: prepushkey hook exited with status 1
1247 exporting bookmark @ failed
1247 exporting bookmark @ failed
1248 [1]
1248 [1]
1249
1249
1250 #endif
1250 #endif
1251
1251
1252 #if b2-binary
1252 #if b2-binary
1253
1253
1254 $ hg push -B @ ssh # bundle2+
1254 $ hg push -B @ ssh # bundle2+
1255 pushing to ssh://user@dummy/issue4455-dest
1255 pushing to ssh://user@dummy/issue4455-dest
1256 searching for changes
1256 searching for changes
1257 no changes found
1257 no changes found
1258 remote: prepushkey hook exited with status 1
1258 remote: prepushkey hook exited with status 1
1259 abort: push failed on remote
1259 abort: push failed on remote
1260 [255]
1260 [100]
1261
1261
1262 #endif
1262 #endif
1263
1263
1264 $ hg -R ../issue4455-dest/ bookmarks
1264 $ hg -R ../issue4455-dest/ bookmarks
1265 no bookmarks set
1265 no bookmarks set
1266
1266
1267 $ cd ..
1267 $ cd ..
1268
1268
1269 Test that pre-pushkey compat for bookmark works as expected (issue5777)
1269 Test that pre-pushkey compat for bookmark works as expected (issue5777)
1270
1270
1271 $ cat << EOF >> $HGRCPATH
1271 $ cat << EOF >> $HGRCPATH
1272 > [ui]
1272 > [ui]
1273 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1273 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1274 > [server]
1274 > [server]
1275 > bookmarks-pushkey-compat = yes
1275 > bookmarks-pushkey-compat = yes
1276 > EOF
1276 > EOF
1277
1277
1278 $ hg init server
1278 $ hg init server
1279 $ echo foo > server/a
1279 $ echo foo > server/a
1280 $ hg -R server book foo
1280 $ hg -R server book foo
1281 $ hg -R server commit -Am a
1281 $ hg -R server commit -Am a
1282 adding a
1282 adding a
1283 $ hg clone ssh://user@dummy/server client
1283 $ hg clone ssh://user@dummy/server client
1284 requesting all changes
1284 requesting all changes
1285 adding changesets
1285 adding changesets
1286 adding manifests
1286 adding manifests
1287 adding file changes
1287 adding file changes
1288 added 1 changesets with 1 changes to 1 files
1288 added 1 changesets with 1 changes to 1 files
1289 new changesets 79513d0d7716 (1 drafts)
1289 new changesets 79513d0d7716 (1 drafts)
1290 updating to branch default
1290 updating to branch default
1291 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1291 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1292
1292
1293 Forbid bookmark move on the server
1293 Forbid bookmark move on the server
1294
1294
1295 $ cat << EOF >> $TESTTMP/no-bm-move.sh
1295 $ cat << EOF >> $TESTTMP/no-bm-move.sh
1296 > #!/bin/sh
1296 > #!/bin/sh
1297 > echo \$HG_NAMESPACE | grep -v bookmarks
1297 > echo \$HG_NAMESPACE | grep -v bookmarks
1298 > EOF
1298 > EOF
1299 $ cat << EOF >> server/.hg/hgrc
1299 $ cat << EOF >> server/.hg/hgrc
1300 > [hooks]
1300 > [hooks]
1301 > prepushkey.no-bm-move= sh $TESTTMP/no-bm-move.sh
1301 > prepushkey.no-bm-move= sh $TESTTMP/no-bm-move.sh
1302 > EOF
1302 > EOF
1303
1303
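The hook above relies on its exit status: `grep -v bookmarks` prints nothing and exits 1 exactly when $HG_NAMESPACE is "bookmarks", so bookmark pushkeys are refused while phases and other namespaces still pass. A sketch of an equivalent in-process Python hook (the module name and wiring are assumptions, not part of this test; a Python hook signals failure by returning a true value):

    # hypothetical rejecthook.py, enabled with
    #   [hooks]
    #   prepushkey.no-bm-move = python:rejecthook.nobookmarkmove
    def nobookmarkmove(ui, repo, hooktype, namespace=None, **kwargs):
        # namespace arrives as bytes on Python 3; True means "fail the hook".
        return namespace == b'bookmarks'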
1304 pushing a changeset is okay
1304 pushing a changeset is okay
1305
1305
1306 $ echo bar >> client/a
1306 $ echo bar >> client/a
1307 $ hg -R client commit -m b
1307 $ hg -R client commit -m b
1308 $ hg -R client push
1308 $ hg -R client push
1309 pushing to ssh://user@dummy/server
1309 pushing to ssh://user@dummy/server
1310 searching for changes
1310 searching for changes
1311 remote: adding changesets
1311 remote: adding changesets
1312 remote: adding manifests
1312 remote: adding manifests
1313 remote: adding file changes
1313 remote: adding file changes
1314 remote: added 1 changesets with 1 changes to 1 files
1314 remote: added 1 changesets with 1 changes to 1 files
1315
1315
1316 attempt to move the bookmark is rejected
1316 attempt to move the bookmark is rejected
1317
1317
1318 $ hg -R client book foo -r .
1318 $ hg -R client book foo -r .
1319 moving bookmark 'foo' forward from 79513d0d7716
1319 moving bookmark 'foo' forward from 79513d0d7716
1320
1320
1321 #if b2-pushkey
1321 #if b2-pushkey
1322 $ hg -R client push
1322 $ hg -R client push
1323 pushing to ssh://user@dummy/server
1323 pushing to ssh://user@dummy/server
1324 searching for changes
1324 searching for changes
1325 no changes found
1325 no changes found
1326 remote: pushkey-abort: prepushkey.no-bm-move hook exited with status 1
1326 remote: pushkey-abort: prepushkey.no-bm-move hook exited with status 1
1327 abort: updating bookmark foo failed
1327 abort: updating bookmark foo failed
1328 [255]
1328 [255]
1329 #endif
1329 #endif
1330 #if b2-binary
1330 #if b2-binary
1331 $ hg -R client push
1331 $ hg -R client push
1332 pushing to ssh://user@dummy/server
1332 pushing to ssh://user@dummy/server
1333 searching for changes
1333 searching for changes
1334 no changes found
1334 no changes found
1335 remote: prepushkey.no-bm-move hook exited with status 1
1335 remote: prepushkey.no-bm-move hook exited with status 1
1336 abort: push failed on remote
1336 abort: push failed on remote
1337 [255]
1337 [100]
1338 #endif
1338 #endif
1339
1339
1340 -- test for pushing bookmarks pointing to secret changesets
1340 -- test for pushing bookmarks pointing to secret changesets
1341
1341
1342 Set up a "remote" repo
1342 Set up a "remote" repo
1343 $ hg init issue6159remote
1343 $ hg init issue6159remote
1344 $ cd issue6159remote
1344 $ cd issue6159remote
1345 $ echo a > a
1345 $ echo a > a
1346 $ hg add a
1346 $ hg add a
1347 $ hg commit -m_
1347 $ hg commit -m_
1348 $ hg bookmark foo
1348 $ hg bookmark foo
1349 $ cd ..
1349 $ cd ..
1350
1350
1351 Clone a local repo
1351 Clone a local repo
1352 $ hg clone -q issue6159remote issue6159local
1352 $ hg clone -q issue6159remote issue6159local
1353 $ cd issue6159local
1353 $ cd issue6159local
1354 $ hg up -qr foo
1354 $ hg up -qr foo
1355 $ echo b > b
1355 $ echo b > b
1356
1356
1357 Move the bookmark "foo" to point at a secret changeset
1357 Move the bookmark "foo" to point at a secret changeset
1358 $ hg commit -qAm_ --config phases.new-commit=secret
1358 $ hg commit -qAm_ --config phases.new-commit=secret
1359
1359
1360 Pushing the bookmark "foo" now fails as it points to a secret changeset
1360 Pushing the bookmark "foo" now fails as it points to a secret changeset
1361 $ hg push -r foo
1361 $ hg push -r foo
1362 pushing to $TESTTMP/issue6159remote
1362 pushing to $TESTTMP/issue6159remote
1363 searching for changes
1363 searching for changes
1364 no changes found (ignored 1 secret changesets)
1364 no changes found (ignored 1 secret changesets)
1365 abort: cannot push bookmark foo as it points to a secret changeset
1365 abort: cannot push bookmark foo as it points to a secret changeset
1366 [255]
1366 [255]
1367
1367
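(The abort is purely about the phase: once the changeset is no longer secret, for instance after `hg phase --draft foo`, the same push is expected to succeed and move the bookmark.)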
1368 Test pushing all bookmarks
1368 Test pushing all bookmarks
1369
1369
1370 $ hg init $TESTTMP/ab1
1370 $ hg init $TESTTMP/ab1
1371 $ cd $TESTTMP/ab1
1371 $ cd $TESTTMP/ab1
1372 $ "$PYTHON" $TESTDIR/seq.py 1 5 | while read i; do
1372 $ "$PYTHON" $TESTDIR/seq.py 1 5 | while read i; do
1373 > echo $i > test && hg ci -Am test
1373 > echo $i > test && hg ci -Am test
1374 > done
1374 > done
1375 adding test
1375 adding test
1376 $ hg clone -U . ../ab2
1376 $ hg clone -U . ../ab2
1377 $ hg book -r 1 A; hg book -r 2 B; hg book -r 3 C
1377 $ hg book -r 1 A; hg book -r 2 B; hg book -r 3 C
1378 $ hg push ../ab2
1378 $ hg push ../ab2
1379 pushing to ../ab2
1379 pushing to ../ab2
1380 searching for changes
1380 searching for changes
1381 no changes found
1381 no changes found
1382 [1]
1382 [1]
1383 $ hg push --all-bookmarks -r 1 ../ab2
1383 $ hg push --all-bookmarks -r 1 ../ab2
1384 abort: cannot specify both --all-bookmarks and --rev
1384 abort: cannot specify both --all-bookmarks and --rev
1385 [10]
1385 [10]
1386 $ hg push --all-bookmarks -B A ../ab2
1386 $ hg push --all-bookmarks -B A ../ab2
1387 abort: cannot specify both --all-bookmarks and --bookmark
1387 abort: cannot specify both --all-bookmarks and --bookmark
1388 [10]
1388 [10]
1389 $ hg push --all-bookmarks ../ab2
1389 $ hg push --all-bookmarks ../ab2
1390 pushing to ../ab2
1390 pushing to ../ab2
1391 searching for changes
1391 searching for changes
1392 no changes found
1392 no changes found
1393 exporting bookmark A
1393 exporting bookmark A
1394 exporting bookmark B
1394 exporting bookmark B
1395 exporting bookmark C
1395 exporting bookmark C
1396 [1]
1396 [1]
@@ -1,1152 +1,1152 b''
1 #testcases sshv1 sshv2
1 #testcases sshv1 sshv2
2
2
3 #if sshv2
3 #if sshv2
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > sshpeer.advertise-v2 = true
6 > sshpeer.advertise-v2 = true
7 > sshserver.support-v2 = true
7 > sshserver.support-v2 = true
8 > EOF
8 > EOF
9 #endif
9 #endif
10
10
11 Test exchange of common information using bundle2
11 Test exchange of common information using bundle2
12
12
13
13
14 $ getmainid() {
14 $ getmainid() {
15 > hg -R main log --template '{node}\n' --rev "$1"
15 > hg -R main log --template '{node}\n' --rev "$1"
16 > }
16 > }
17
17
18 enable obsolescence
18 enable obsolescence
19
19
20 $ cp $HGRCPATH $TESTTMP/hgrc.orig
20 $ cp $HGRCPATH $TESTTMP/hgrc.orig
21 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
21 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
22 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
22 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
23 > hg debuglock
23 > hg debuglock
24 > EOF
24 > EOF
25
25
26 $ cat >> $HGRCPATH << EOF
26 $ cat >> $HGRCPATH << EOF
27 > [experimental]
27 > [experimental]
28 > evolution.createmarkers=True
28 > evolution.createmarkers=True
29 > evolution.exchange=True
29 > evolution.exchange=True
30 > bundle2-output-capture=True
30 > bundle2-output-capture=True
31 > [ui]
31 > [ui]
32 > ssh="$PYTHON" "$TESTDIR/dummyssh"
32 > ssh="$PYTHON" "$TESTDIR/dummyssh"
33 > [command-templates]
33 > [command-templates]
34 > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
34 > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
35 > [web]
35 > [web]
36 > push_ssl = false
36 > push_ssl = false
37 > allow_push = *
37 > allow_push = *
38 > [phases]
38 > [phases]
39 > publish=False
39 > publish=False
40 > [hooks]
40 > [hooks]
41 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
41 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
42 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
42 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
43 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
43 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
44 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
44 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
45 > EOF
45 > EOF
46
46
47 The extension requires a repo (currently unused)
47 The extension requires a repo (currently unused)
48
48
49 $ hg init main
49 $ hg init main
50 $ cd main
50 $ cd main
51 $ touch a
51 $ touch a
52 $ hg add a
52 $ hg add a
53 $ hg commit -m 'a'
53 $ hg commit -m 'a'
54 pre-close-tip:3903775176ed draft
54 pre-close-tip:3903775176ed draft
55 postclose-tip:3903775176ed draft
55 postclose-tip:3903775176ed draft
56 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
56 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
57
57
58 $ hg unbundle $TESTDIR/bundles/rebase.hg
58 $ hg unbundle $TESTDIR/bundles/rebase.hg
59 adding changesets
59 adding changesets
60 adding manifests
60 adding manifests
61 adding file changes
61 adding file changes
62 pre-close-tip:02de42196ebe draft
62 pre-close-tip:02de42196ebe draft
63 added 8 changesets with 7 changes to 7 files (+3 heads)
63 added 8 changesets with 7 changes to 7 files (+3 heads)
64 new changesets cd010b8cd998:02de42196ebe (8 drafts)
64 new changesets cd010b8cd998:02de42196ebe (8 drafts)
65 postclose-tip:02de42196ebe draft
65 postclose-tip:02de42196ebe draft
66 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:$ID$ HG_TXNNAME=unbundle
66 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:$ID$ HG_TXNNAME=unbundle
67 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
67 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
68 (run 'hg heads' to see heads, 'hg merge' to merge)
68 (run 'hg heads' to see heads, 'hg merge' to merge)
69
69
70 $ cd ..
70 $ cd ..
71
71
72 Real world exchange
72 Real world exchange
73 =====================
73 =====================
74
74
75 Add more obsolescence information
75 Add more obsolescence information
76
76
77 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
77 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
78 pre-close-tip:02de42196ebe draft
78 pre-close-tip:02de42196ebe draft
79 1 new obsolescence markers
79 1 new obsolescence markers
80 postclose-tip:02de42196ebe draft
80 postclose-tip:02de42196ebe draft
81 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
81 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
82 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
82 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
83 pre-close-tip:02de42196ebe draft
83 pre-close-tip:02de42196ebe draft
84 1 new obsolescence markers
84 1 new obsolescence markers
85 postclose-tip:02de42196ebe draft
85 postclose-tip:02de42196ebe draft
86 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
86 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
87
87
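(The all-1s and all-2s identifiers are deliberately unknown nodes: `debugobsolete PRED SUCC` records a marker stating that PRED was rewritten into SUCC, and since these predecessors do not exist in the repository no local changeset is obsoleted; only the markers themselves travel with the pulls and pushes that follow.)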
88 clone --pull
88 clone --pull
89
89
90 $ hg -R main phase --public cd010b8cd998
90 $ hg -R main phase --public cd010b8cd998
91 pre-close-tip:02de42196ebe draft
91 pre-close-tip:02de42196ebe draft
92 postclose-tip:02de42196ebe draft
92 postclose-tip:02de42196ebe draft
93 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
93 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
94 $ hg clone main other --pull --rev 9520eea781bc
94 $ hg clone main other --pull --rev 9520eea781bc
95 adding changesets
95 adding changesets
96 adding manifests
96 adding manifests
97 adding file changes
97 adding file changes
98 pre-close-tip:9520eea781bc draft
98 pre-close-tip:9520eea781bc draft
99 added 2 changesets with 2 changes to 2 files
99 added 2 changesets with 2 changes to 2 files
100 1 new obsolescence markers
100 1 new obsolescence markers
101 new changesets cd010b8cd998:9520eea781bc (1 drafts)
101 new changesets cd010b8cd998:9520eea781bc (1 drafts)
102 postclose-tip:9520eea781bc draft
102 postclose-tip:9520eea781bc draft
103 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
103 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
104 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
104 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
105 updating to branch default
105 updating to branch default
106 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
106 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 $ hg -R other log -G
107 $ hg -R other log -G
108 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
108 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
109 |
109 |
110 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
110 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
111
111
112 $ hg -R other debugobsolete
112 $ hg -R other debugobsolete
113 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
113 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
114
114
115 pull
115 pull
116
116
117 $ hg -R main phase --public 9520eea781bc
117 $ hg -R main phase --public 9520eea781bc
118 pre-close-tip:02de42196ebe draft
118 pre-close-tip:02de42196ebe draft
119 postclose-tip:02de42196ebe draft
119 postclose-tip:02de42196ebe draft
120 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
120 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
121 $ hg -R other pull -r 24b6387c8c8c
121 $ hg -R other pull -r 24b6387c8c8c
122 pulling from $TESTTMP/main
122 pulling from $TESTTMP/main
123 searching for changes
123 searching for changes
124 adding changesets
124 adding changesets
125 adding manifests
125 adding manifests
126 adding file changes
126 adding file changes
127 pre-close-tip:24b6387c8c8c draft
127 pre-close-tip:24b6387c8c8c draft
128 added 1 changesets with 1 changes to 1 files (+1 heads)
128 added 1 changesets with 1 changes to 1 files (+1 heads)
129 1 new obsolescence markers
129 1 new obsolescence markers
130 new changesets 24b6387c8c8c (1 drafts)
130 new changesets 24b6387c8c8c (1 drafts)
131 postclose-tip:24b6387c8c8c draft
131 postclose-tip:24b6387c8c8c draft
132 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
132 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
133 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
133 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
134 (run 'hg heads' to see heads, 'hg merge' to merge)
134 (run 'hg heads' to see heads, 'hg merge' to merge)
135 $ hg -R other log -G
135 $ hg -R other log -G
136 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
136 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
137 |
137 |
138 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
138 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
139 |/
139 |/
140 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
140 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
141
141
142 $ hg -R other debugobsolete
142 $ hg -R other debugobsolete
143 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
143 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
144 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
144 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
145
145
146 pull empty (with phase movement)
146 pull empty (with phase movement)
147
147
148 $ hg -R main phase --public 24b6387c8c8c
148 $ hg -R main phase --public 24b6387c8c8c
149 pre-close-tip:02de42196ebe draft
149 pre-close-tip:02de42196ebe draft
150 postclose-tip:02de42196ebe draft
150 postclose-tip:02de42196ebe draft
151 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
151 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
152 $ hg -R other pull -r 24b6387c8c8c
152 $ hg -R other pull -r 24b6387c8c8c
153 pulling from $TESTTMP/main
153 pulling from $TESTTMP/main
154 no changes found
154 no changes found
155 pre-close-tip:24b6387c8c8c public
155 pre-close-tip:24b6387c8c8c public
156 1 local changesets published
156 1 local changesets published
157 postclose-tip:24b6387c8c8c public
157 postclose-tip:24b6387c8c8c public
158 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
158 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
159 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
159 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
160 $ hg -R other log -G
160 $ hg -R other log -G
161 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
161 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
162 |
162 |
163 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
163 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
164 |/
164 |/
165 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
165 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
166
166
167 $ hg -R other debugobsolete
167 $ hg -R other debugobsolete
168 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
168 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
169 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
169 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
170
170
171 pull empty
171 pull empty
172
172
173 $ hg -R other pull -r 24b6387c8c8c
173 $ hg -R other pull -r 24b6387c8c8c
174 pulling from $TESTTMP/main
174 pulling from $TESTTMP/main
175 no changes found
175 no changes found
176 pre-close-tip:24b6387c8c8c public
176 pre-close-tip:24b6387c8c8c public
177 postclose-tip:24b6387c8c8c public
177 postclose-tip:24b6387c8c8c public
178 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
178 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
179 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
179 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
180 $ hg -R other log -G
180 $ hg -R other log -G
181 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
181 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
182 |
182 |
183 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
183 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
184 |/
184 |/
185 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
185 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
186
186
187 $ hg -R other debugobsolete
187 $ hg -R other debugobsolete
188 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
188 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
189 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
189 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
190
190
191 add extra data (bookmarks and obsolescence markers) to test their exchange during push
191 add extra data (bookmarks and obsolescence markers) to test their exchange during push
192
192
193 $ hg -R main bookmark --rev eea13746799a book_eea1
193 $ hg -R main bookmark --rev eea13746799a book_eea1
194 pre-close-tip:02de42196ebe draft
194 pre-close-tip:02de42196ebe draft
195 postclose-tip:02de42196ebe draft
195 postclose-tip:02de42196ebe draft
196 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
196 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
197 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
197 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
198 pre-close-tip:02de42196ebe draft
198 pre-close-tip:02de42196ebe draft
199 1 new obsolescence markers
199 1 new obsolescence markers
200 postclose-tip:02de42196ebe draft
200 postclose-tip:02de42196ebe draft
201 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
201 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
202 $ hg -R main bookmark --rev 02de42196ebe book_02de
202 $ hg -R main bookmark --rev 02de42196ebe book_02de
203 pre-close-tip:02de42196ebe draft book_02de
203 pre-close-tip:02de42196ebe draft book_02de
204 postclose-tip:02de42196ebe draft book_02de
204 postclose-tip:02de42196ebe draft book_02de
205 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
205 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
206 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
206 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
207 pre-close-tip:02de42196ebe draft book_02de
207 pre-close-tip:02de42196ebe draft book_02de
208 1 new obsolescence markers
208 1 new obsolescence markers
209 postclose-tip:02de42196ebe draft book_02de
209 postclose-tip:02de42196ebe draft book_02de
210 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
210 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
211 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
211 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
212 pre-close-tip:02de42196ebe draft book_02de
212 pre-close-tip:02de42196ebe draft book_02de
213 postclose-tip:02de42196ebe draft book_02de
213 postclose-tip:02de42196ebe draft book_02de
214 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
214 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
215 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
215 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
216 pre-close-tip:02de42196ebe draft book_02de
216 pre-close-tip:02de42196ebe draft book_02de
217 1 new obsolescence markers
217 1 new obsolescence markers
218 postclose-tip:02de42196ebe draft book_02de
218 postclose-tip:02de42196ebe draft book_02de
219 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
219 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
220 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
220 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
221 pre-close-tip:02de42196ebe draft book_02de
221 pre-close-tip:02de42196ebe draft book_02de
222 postclose-tip:02de42196ebe draft book_02de
222 postclose-tip:02de42196ebe draft book_02de
223 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
223 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
224 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
224 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
225 pre-close-tip:02de42196ebe draft book_02de
225 pre-close-tip:02de42196ebe draft book_02de
226 1 new obsolescence markers
226 1 new obsolescence markers
227 postclose-tip:02de42196ebe draft book_02de
227 postclose-tip:02de42196ebe draft book_02de
228 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
228 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
229 $ hg -R main bookmark --rev 32af7686d403 book_32af
229 $ hg -R main bookmark --rev 32af7686d403 book_32af
230 pre-close-tip:02de42196ebe draft book_02de
230 pre-close-tip:02de42196ebe draft book_02de
231 postclose-tip:02de42196ebe draft book_02de
231 postclose-tip:02de42196ebe draft book_02de
232 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
232 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
233 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
233 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
234 pre-close-tip:02de42196ebe draft book_02de
234 pre-close-tip:02de42196ebe draft book_02de
235 1 new obsolescence markers
235 1 new obsolescence markers
236 postclose-tip:02de42196ebe draft book_02de
236 postclose-tip:02de42196ebe draft book_02de
237 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
237 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
238
238
239 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
239 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
240 pre-close-tip:24b6387c8c8c public
240 pre-close-tip:24b6387c8c8c public
241 postclose-tip:24b6387c8c8c public
241 postclose-tip:24b6387c8c8c public
242 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
242 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
243 $ hg -R other bookmark --rev cd010b8cd998 book_02de
243 $ hg -R other bookmark --rev cd010b8cd998 book_02de
244 pre-close-tip:24b6387c8c8c public
244 pre-close-tip:24b6387c8c8c public
245 postclose-tip:24b6387c8c8c public
245 postclose-tip:24b6387c8c8c public
246 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
246 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
247 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
247 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
248 pre-close-tip:24b6387c8c8c public
248 pre-close-tip:24b6387c8c8c public
249 postclose-tip:24b6387c8c8c public
249 postclose-tip:24b6387c8c8c public
250 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
250 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
251 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
251 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
252 pre-close-tip:24b6387c8c8c public
252 pre-close-tip:24b6387c8c8c public
253 postclose-tip:24b6387c8c8c public
253 postclose-tip:24b6387c8c8c public
254 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
254 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
255 $ hg -R other bookmark --rev cd010b8cd998 book_32af
255 $ hg -R other bookmark --rev cd010b8cd998 book_32af
256 pre-close-tip:24b6387c8c8c public
256 pre-close-tip:24b6387c8c8c public
257 postclose-tip:24b6387c8c8c public
257 postclose-tip:24b6387c8c8c public
258 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
258 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
259
259
260 $ hg -R main phase --public eea13746799a
260 $ hg -R main phase --public eea13746799a
261 pre-close-tip:02de42196ebe draft book_02de
261 pre-close-tip:02de42196ebe draft book_02de
262 postclose-tip:02de42196ebe draft book_02de
262 postclose-tip:02de42196ebe draft book_02de
263 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
263 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
264
264
265 push
265 push
266 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
266 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
267 pushing to other
267 pushing to other
268 searching for changes
268 searching for changes
269 remote: adding changesets
269 remote: adding changesets
270 remote: adding manifests
270 remote: adding manifests
271 remote: adding file changes
271 remote: adding file changes
272 remote: pre-close-tip:eea13746799a public book_eea1
272 remote: pre-close-tip:eea13746799a public book_eea1
273 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
273 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
274 remote: 1 new obsolescence markers
274 remote: 1 new obsolescence markers
275 remote: pushkey: lock state after "bookmarks"
275 remote: pushkey: lock state after "bookmarks"
276 remote: lock: free
276 remote: lock: free
277 remote: wlock: free
277 remote: wlock: free
278 remote: postclose-tip:eea13746799a public book_eea1
278 remote: postclose-tip:eea13746799a public book_eea1
279 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/other
279 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/other
280 updating bookmark book_eea1
280 updating bookmark book_eea1
281 pre-close-tip:02de42196ebe draft book_02de
281 pre-close-tip:02de42196ebe draft book_02de
282 postclose-tip:02de42196ebe draft book_02de
282 postclose-tip:02de42196ebe draft book_02de
283 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
283 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
284 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
284 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
285 $ hg -R other log -G
285 $ hg -R other log -G
286 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
286 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
287 |\
287 |\
288 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
288 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
289 | |
289 | |
290 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
290 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
291 |/
291 |/
292 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
292 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
293
293
294 $ hg -R other debugobsolete
294 $ hg -R other debugobsolete
295 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
295 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298
298
299 pull over ssh
299 pull over ssh
300
300
301 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
301 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
302 pulling from ssh://user@dummy/main
302 pulling from ssh://user@dummy/main
303 searching for changes
303 searching for changes
304 adding changesets
304 adding changesets
305 adding manifests
305 adding manifests
306 adding file changes
306 adding file changes
307 updating bookmark book_02de
307 updating bookmark book_02de
308 pre-close-tip:02de42196ebe draft book_02de
308 pre-close-tip:02de42196ebe draft book_02de
309 added 1 changesets with 1 changes to 1 files (+1 heads)
309 added 1 changesets with 1 changes to 1 files (+1 heads)
310 1 new obsolescence markers
310 1 new obsolescence markers
311 new changesets 02de42196ebe (1 drafts)
311 new changesets 02de42196ebe (1 drafts)
312 postclose-tip:02de42196ebe draft book_02de
312 postclose-tip:02de42196ebe draft book_02de
313 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
313 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
314 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
314 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
315 (run 'hg heads' to see heads, 'hg merge' to merge)
315 (run 'hg heads' to see heads, 'hg merge' to merge)
316 $ hg -R other debugobsolete
316 $ hg -R other debugobsolete
317 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
317 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
318 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
318 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
319 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
319 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
320 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
320 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
321
321
322 pull over http
322 pull over http
323
323
324 $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
324 $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
325 $ cat main.pid >> $DAEMON_PIDS
325 $ cat main.pid >> $DAEMON_PIDS
326
326
327 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
327 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
328 pulling from http://localhost:$HGPORT/
328 pulling from http://localhost:$HGPORT/
329 searching for changes
329 searching for changes
330 adding changesets
330 adding changesets
331 adding manifests
331 adding manifests
332 adding file changes
332 adding file changes
333 updating bookmark book_42cc
333 updating bookmark book_42cc
334 pre-close-tip:42ccdea3bb16 draft book_42cc
334 pre-close-tip:42ccdea3bb16 draft book_42cc
335 added 1 changesets with 1 changes to 1 files (+1 heads)
335 added 1 changesets with 1 changes to 1 files (+1 heads)
336 1 new obsolescence markers
336 1 new obsolescence markers
337 new changesets 42ccdea3bb16 (1 drafts)
337 new changesets 42ccdea3bb16 (1 drafts)
338 postclose-tip:42ccdea3bb16 draft book_42cc
338 postclose-tip:42ccdea3bb16 draft book_42cc
339 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
339 txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
340 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
340 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
341 (run 'hg heads .' to see heads, 'hg merge' to merge)
341 (run 'hg heads .' to see heads, 'hg merge' to merge)
342 $ cat main-error.log
342 $ cat main-error.log
343 $ hg -R other debugobsolete
343 $ hg -R other debugobsolete
344 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
344 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
345 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
345 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
346 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
346 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
347 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
347 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
348 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
348 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
349
349
350 push over ssh
350 push over ssh
351
351
352 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
352 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
353 pushing to ssh://user@dummy/other
353 pushing to ssh://user@dummy/other
354 searching for changes
354 searching for changes
355 remote: adding changesets
355 remote: adding changesets
356 remote: adding manifests
356 remote: adding manifests
357 remote: adding file changes
357 remote: adding file changes
358 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
358 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
359 remote: added 1 changesets with 1 changes to 1 files
359 remote: added 1 changesets with 1 changes to 1 files
360 remote: 1 new obsolescence markers
360 remote: 1 new obsolescence markers
361 remote: pushkey: lock state after "bookmarks"
361 remote: pushkey: lock state after "bookmarks"
362 remote: lock: free
362 remote: lock: free
363 remote: wlock: free
363 remote: wlock: free
364 remote: postclose-tip:5fddd98957c8 draft book_5fdd
364 remote: postclose-tip:5fddd98957c8 draft book_5fdd
365 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_TXNNAME=serve HG_URL=remote:ssh:$LOCALIP
365 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_TXNNAME=serve HG_URL=remote:ssh:$LOCALIP
366 updating bookmark book_5fdd
366 updating bookmark book_5fdd
367 pre-close-tip:02de42196ebe draft book_02de
367 pre-close-tip:02de42196ebe draft book_02de
368 postclose-tip:02de42196ebe draft book_02de
368 postclose-tip:02de42196ebe draft book_02de
369 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
369 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
370 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
370 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
371 $ hg -R other log -G
371 $ hg -R other log -G
372 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
372 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
373 |
373 |
374 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
374 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
375 |
375 |
376 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
376 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
377 | |
377 | |
378 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
378 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
379 | |/|
379 | |/|
380 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
380 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
381 |/ /
381 |/ /
382 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
382 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
383 |/
383 |/
384 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
384 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
385
385
386 $ hg -R other debugobsolete
386 $ hg -R other debugobsolete
387 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
387 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
388 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
388 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
389 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
389 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
390 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
390 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
391 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
391 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
392 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
392 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
393
393
394 push over http
394 push over http
395
395
396 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
396 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
397 $ cat other.pid >> $DAEMON_PIDS
397 $ cat other.pid >> $DAEMON_PIDS
398
398
399 $ hg -R main phase --public 32af7686d403
399 $ hg -R main phase --public 32af7686d403
400 pre-close-tip:02de42196ebe draft book_02de
400 pre-close-tip:02de42196ebe draft book_02de
401 postclose-tip:02de42196ebe draft book_02de
401 postclose-tip:02de42196ebe draft book_02de
402 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
402 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
403 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
403 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
404 pushing to http://localhost:$HGPORT2/
404 pushing to http://localhost:$HGPORT2/
405 searching for changes
405 searching for changes
406 remote: adding changesets
406 remote: adding changesets
407 remote: adding manifests
407 remote: adding manifests
408 remote: adding file changes
408 remote: adding file changes
409 remote: pre-close-tip:32af7686d403 public book_32af
409 remote: pre-close-tip:32af7686d403 public book_32af
410 remote: added 1 changesets with 1 changes to 1 files
410 remote: added 1 changesets with 1 changes to 1 files
411 remote: 1 new obsolescence markers
411 remote: 1 new obsolescence markers
412 remote: pushkey: lock state after "bookmarks"
412 remote: pushkey: lock state after "bookmarks"
413 remote: lock: free
413 remote: lock: free
414 remote: wlock: free
414 remote: wlock: free
415 remote: postclose-tip:32af7686d403 public book_32af
415 remote: postclose-tip:32af7686d403 public book_32af
416 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_TXNNAME=serve HG_URL=remote:http:$LOCALIP: (glob)
416 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_TXNNAME=serve HG_URL=remote:http:$LOCALIP: (glob)
417 updating bookmark book_32af
417 updating bookmark book_32af
418 pre-close-tip:02de42196ebe draft book_02de
418 pre-close-tip:02de42196ebe draft book_02de
419 postclose-tip:02de42196ebe draft book_02de
419 postclose-tip:02de42196ebe draft book_02de
420 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
420 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
421 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
421 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
422 $ cat other-error.log
422 $ cat other-error.log
423
423
424 Check final content.
424 Check final content.
425
425
426 $ hg -R other log -G
426 $ hg -R other log -G
427 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
427 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
428 |
428 |
429 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
429 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
430 |
430 |
431 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
431 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
432 |
432 |
433 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
433 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
434 | |
434 | |
435 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
435 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
436 | |/|
436 | |/|
437 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
437 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
438 |/ /
438 |/ /
439 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
439 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
440 |/
440 |/
441 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
441 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
442
442
443 $ hg -R other debugobsolete
443 $ hg -R other debugobsolete
444 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
444 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
445 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
445 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
446 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
446 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
447 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
447 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
448 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
448 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
449 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
449 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
450 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
450 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
451
451
452 (check that no 'pending' files remain)
452 (check that no 'pending' files remain)
453
453
454 $ ls -1 other/.hg/bookmarks*
454 $ ls -1 other/.hg/bookmarks*
455 other/.hg/bookmarks
455 other/.hg/bookmarks
456 $ ls -1 other/.hg/store/phaseroots*
456 $ ls -1 other/.hg/store/phaseroots*
457 other/.hg/store/phaseroots
457 other/.hg/store/phaseroots
458 $ ls -1 other/.hg/store/00changelog.i*
458 $ ls -1 other/.hg/store/00changelog.i*
459 other/.hg/store/00changelog.i
459 other/.hg/store/00changelog.i
460
460
461 Error Handling
461 Error Handling
462 ==============
462 ==============
463
463
464 Check that errors are properly returned to the client during push.
464 Check that errors are properly returned to the client during push.
465
465
466 Setting up
466 Setting up
467
467
468 $ cat > failpush.py << EOF
468 $ cat > failpush.py << EOF
469 > """A small extension that makes push fails when using bundle2
469 > """A small extension that makes push fails when using bundle2
470 >
470 >
471 > used to test error handling in bundle2
471 > used to test error handling in bundle2
472 > """
472 > """
473 >
473 >
474 > from mercurial import error
474 > from mercurial import error
475 > from mercurial import bundle2
475 > from mercurial import bundle2
476 > from mercurial import exchange
476 > from mercurial import exchange
477 > from mercurial import extensions
477 > from mercurial import extensions
478 > from mercurial import registrar
478 > from mercurial import registrar
479 > cmdtable = {}
479 > cmdtable = {}
480 > command = registrar.command(cmdtable)
480 > command = registrar.command(cmdtable)
481 >
481 >
482 > configtable = {}
482 > configtable = {}
483 > configitem = registrar.configitem(configtable)
483 > configitem = registrar.configitem(configtable)
484 > configitem(b'failpush', b'reason',
484 > configitem(b'failpush', b'reason',
485 > default=None,
485 > default=None,
486 > )
486 > )
487 >
487 >
488 > def _pushbundle2failpart(pushop, bundler):
488 > def _pushbundle2failpart(pushop, bundler):
489 > reason = pushop.ui.config(b'failpush', b'reason')
489 > reason = pushop.ui.config(b'failpush', b'reason')
490 > part = None
490 > part = None
491 > if reason == b'abort':
491 > if reason == b'abort':
492 > bundler.newpart(b'test:abort')
492 > bundler.newpart(b'test:abort')
493 > if reason == b'unknown':
493 > if reason == b'unknown':
494 > bundler.newpart(b'test:unknown')
494 > bundler.newpart(b'test:unknown')
495 > if reason == b'race':
495 > if reason == b'race':
496 > # 20 bytes of junk: guaranteed not to match the remote heads (simulates a race)
496 > # 20 bytes of junk: guaranteed not to match the remote heads (simulates a race)
497 > bundler.newpart(b'check:heads', data=b'01234567890123456789')
497 > bundler.newpart(b'check:heads', data=b'01234567890123456789')
498 >
498 >
499 > @bundle2.parthandler(b"test:abort")
499 > @bundle2.parthandler(b"test:abort")
500 > def handleabort(op, part):
500 > def handleabort(op, part):
501 > raise error.Abort(b'Abandon ship!', hint=b"don't panic")
501 > raise error.Abort(b'Abandon ship!', hint=b"don't panic")
502 >
502 >
503 > def uisetup(ui):
503 > def uisetup(ui):
504 > exchange.b2partsgenmapping[b'failpart'] = _pushbundle2failpart
504 > exchange.b2partsgenmapping[b'failpart'] = _pushbundle2failpart
505 > exchange.b2partsgenorder.insert(0, b'failpart')
505 > exchange.b2partsgenorder.insert(0, b'failpart')
506 >
506 >
507 > EOF
507 > EOF
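
(Aside on the 'race' reason above: a well-behaved client fills the mandatory
'check:heads' part with the 20-byte binary nodes of the remote heads it saw
during discovery; the server re-reads that data in 20-byte chunks and raises
PushRaced when the set no longer matches its own heads, so 20 bytes of junk
reliably triggers the "remote repository changed while pushing" error
exercised further down. A minimal sketch of the honest generator, assuming
pushop.remoteheads has already been populated by discovery as in exchange.py:)

  def _realcheckheads(pushop, bundler):
      # each entry of pushop.remoteheads is a 20-byte binary node observed
      # during discovery; the server compares that set against its own heads
      bundler.newpart(b'check:heads', data=b''.join(pushop.remoteheads))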
508
508
509 $ cd main
509 $ cd main
510 $ hg up tip
510 $ hg up tip
511 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
511 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
512 $ echo 'I' > I
512 $ echo 'I' > I
513 $ hg add I
513 $ hg add I
514 $ hg ci -m 'I'
514 $ hg ci -m 'I'
515 pre-close-tip:e7ec4e813ba6 draft
515 pre-close-tip:e7ec4e813ba6 draft
516 postclose-tip:e7ec4e813ba6 draft
516 postclose-tip:e7ec4e813ba6 draft
517 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
517 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
518 $ hg id
518 $ hg id
519 e7ec4e813ba6 tip
519 e7ec4e813ba6 tip
520 $ cd ..
520 $ cd ..
521
521
522 $ cat << EOF >> $HGRCPATH
522 $ cat << EOF >> $HGRCPATH
523 > [extensions]
523 > [extensions]
524 > failpush=$TESTTMP/failpush.py
524 > failpush=$TESTTMP/failpush.py
525 > EOF
525 > EOF
526
526
527 $ killdaemons.py
527 $ killdaemons.py
528 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
528 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
529 $ cat other.pid >> $DAEMON_PIDS
529 $ cat other.pid >> $DAEMON_PIDS
530
530
531 Doing the actual push: Abort error
531 Doing the actual push: Abort error
532
532
533 $ cat << EOF >> $HGRCPATH
533 $ cat << EOF >> $HGRCPATH
534 > [failpush]
534 > [failpush]
535 > reason = abort
535 > reason = abort
536 > EOF
536 > EOF
537
537
538 $ hg -R main push other -r e7ec4e813ba6
538 $ hg -R main push other -r e7ec4e813ba6
539 pushing to other
539 pushing to other
540 searching for changes
540 searching for changes
541 abort: Abandon ship!
541 abort: Abandon ship!
542 (don't panic)
542 (don't panic)
543 [255]
543 [255]
544
544
545 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
545 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
546 pushing to ssh://user@dummy/other
546 pushing to ssh://user@dummy/other
547 searching for changes
547 searching for changes
548 remote: Abandon ship!
548 remote: Abandon ship!
549 remote: (don't panic)
549 remote: (don't panic)
550 abort: push failed on remote
550 abort: push failed on remote
551 [255]
551 [100]
552
552
553 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
553 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
554 pushing to http://localhost:$HGPORT2/
554 pushing to http://localhost:$HGPORT2/
555 searching for changes
555 searching for changes
556 remote: Abandon ship!
556 remote: Abandon ship!
557 remote: (don't panic)
557 remote: (don't panic)
558 abort: push failed on remote
558 abort: push failed on remote
559 [255]
559 [100]
560
560
561
561
562 Doing the actual push: unknown mandatory parts
562 Doing the actual push: unknown mandatory parts
563
563
564 $ cat << EOF >> $HGRCPATH
564 $ cat << EOF >> $HGRCPATH
565 > [failpush]
565 > [failpush]
566 > reason = unknown
566 > reason = unknown
567 > EOF
567 > EOF
568
568
569 $ hg -R main push other -r e7ec4e813ba6
569 $ hg -R main push other -r e7ec4e813ba6
570 pushing to other
570 pushing to other
571 searching for changes
571 searching for changes
572 abort: missing support for test:unknown
572 abort: missing support for test:unknown
573 [255]
573 [100]
574
574
575 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
575 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
576 pushing to ssh://user@dummy/other
576 pushing to ssh://user@dummy/other
577 searching for changes
577 searching for changes
578 abort: missing support for test:unknown
578 abort: missing support for test:unknown
579 [255]
579 [100]
580
580
581 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
581 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
582 pushing to http://localhost:$HGPORT2/
582 pushing to http://localhost:$HGPORT2/
583 searching for changes
583 searching for changes
584 abort: missing support for test:unknown
584 abort: missing support for test:unknown
585 [255]
585 [100]
586
586
587 Doing the actual push: race
587 Doing the actual push: race
588
588
589 $ cat << EOF >> $HGRCPATH
589 $ cat << EOF >> $HGRCPATH
590 > [failpush]
590 > [failpush]
591 > reason = race
591 > reason = race
592 > EOF
592 > EOF
593
593
594 $ hg -R main push other -r e7ec4e813ba6
594 $ hg -R main push other -r e7ec4e813ba6
595 pushing to other
595 pushing to other
596 searching for changes
596 searching for changes
597 abort: push failed:
597 abort: push failed:
598 'remote repository changed while pushing - please try again'
598 'remote repository changed while pushing - please try again'
599 [255]
599 [255]
600
600
601 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
601 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
602 pushing to ssh://user@dummy/other
602 pushing to ssh://user@dummy/other
603 searching for changes
603 searching for changes
604 abort: push failed:
604 abort: push failed:
605 'remote repository changed while pushing - please try again'
605 'remote repository changed while pushing - please try again'
606 [255]
606 [255]
607
607
608 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
608 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
609 pushing to http://localhost:$HGPORT2/
609 pushing to http://localhost:$HGPORT2/
610 searching for changes
610 searching for changes
611 abort: push failed:
611 abort: push failed:
612 'remote repository changed while pushing - please try again'
612 'remote repository changed while pushing - please try again'
613 [255]
613 [255]
614
614
615 Doing the actual push: hook abort
615 Doing the actual push: hook abort
616
616
617 $ cat << EOF >> $HGRCPATH
617 $ cat << EOF >> $HGRCPATH
618 > [failpush]
618 > [failpush]
619 > reason =
619 > reason =
620 > [hooks]
620 > [hooks]
621 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
621 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
622 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
622 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
623 > EOF
623 > EOF
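
(For comparison with the shell hooks above: an in-process Python hook signals
failure by returning a truthy value, which aborts the transaction just like a
non-zero shell exit status. A hypothetical equivalent, not used by this test;
the file name and the [hooks] lines below are assumptions for illustration:)

  # $TESTTMP/failhooks.py (hypothetical)
  def failpush(ui, repo, **kwargs):
      ui.status(b'You shall not pass!\n')
      return True   # a truthy return value makes the pretxnclose hook fail

  def cleanup(ui, repo, **kwargs):
      # txnabort is a notification hook; its result cannot change the rollback
      ui.status(b'Cleaning up the mess...\n')

  enabled with, e.g.:
    pretxnclose.failpush = python:$TESTTMP/failhooks.py:failpush
    txnabort.failpush = python:$TESTTMP/failhooks.py:cleanup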
624
624
625 $ killdaemons.py
625 $ killdaemons.py
626 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
626 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
627 $ cat other.pid >> $DAEMON_PIDS
627 $ cat other.pid >> $DAEMON_PIDS
628
628
629 $ hg -R main push other -r e7ec4e813ba6
629 $ hg -R main push other -r e7ec4e813ba6
630 pushing to other
630 pushing to other
631 searching for changes
631 searching for changes
632 remote: adding changesets
632 remote: adding changesets
633 remote: adding manifests
633 remote: adding manifests
634 remote: adding file changes
634 remote: adding file changes
635 remote: pre-close-tip:e7ec4e813ba6 draft
635 remote: pre-close-tip:e7ec4e813ba6 draft
636 remote: You shall not pass!
636 remote: You shall not pass!
637 remote: transaction abort!
637 remote: transaction abort!
638 remote: Cleaning up the mess...
638 remote: Cleaning up the mess...
639 remote: rollback completed
639 remote: rollback completed
640 abort: pretxnclose.failpush hook exited with status 1
640 abort: pretxnclose.failpush hook exited with status 1
641 [40]
641 [40]
642
642
643 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
643 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
644 pushing to ssh://user@dummy/other
644 pushing to ssh://user@dummy/other
645 searching for changes
645 searching for changes
646 remote: adding changesets
646 remote: adding changesets
647 remote: adding manifests
647 remote: adding manifests
648 remote: adding file changes
648 remote: adding file changes
649 remote: pre-close-tip:e7ec4e813ba6 draft
649 remote: pre-close-tip:e7ec4e813ba6 draft
650 remote: You shall not pass!
650 remote: You shall not pass!
651 remote: transaction abort!
651 remote: transaction abort!
652 remote: Cleaning up the mess...
652 remote: Cleaning up the mess...
653 remote: rollback completed
653 remote: rollback completed
654 remote: pretxnclose.failpush hook exited with status 1
654 remote: pretxnclose.failpush hook exited with status 1
655 abort: push failed on remote
655 abort: push failed on remote
656 [255]
656 [100]
657
657
658 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
658 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
659 pushing to http://localhost:$HGPORT2/
659 pushing to http://localhost:$HGPORT2/
660 searching for changes
660 searching for changes
661 remote: adding changesets
661 remote: adding changesets
662 remote: adding manifests
662 remote: adding manifests
663 remote: adding file changes
663 remote: adding file changes
664 remote: pre-close-tip:e7ec4e813ba6 draft
664 remote: pre-close-tip:e7ec4e813ba6 draft
665 remote: You shall not pass!
665 remote: You shall not pass!
666 remote: transaction abort!
666 remote: transaction abort!
667 remote: Cleaning up the mess...
667 remote: Cleaning up the mess...
668 remote: rollback completed
668 remote: rollback completed
669 remote: pretxnclose.failpush hook exited with status 1
669 remote: pretxnclose.failpush hook exited with status 1
670 abort: push failed on remote
670 abort: push failed on remote
671 [255]
671 [100]
672
672
673 (check that no 'pending' files remain)
673 (check that no 'pending' files remain)
674
674
675 $ ls -1 other/.hg/bookmarks*
675 $ ls -1 other/.hg/bookmarks*
676 other/.hg/bookmarks
676 other/.hg/bookmarks
677 $ ls -1 other/.hg/store/phaseroots*
677 $ ls -1 other/.hg/store/phaseroots*
678 other/.hg/store/phaseroots
678 other/.hg/store/phaseroots
679 $ ls -1 other/.hg/store/00changelog.i*
679 $ ls -1 other/.hg/store/00changelog.i*
680 other/.hg/store/00changelog.i
680 other/.hg/store/00changelog.i
681
681
682 Check error from hook during the unbundling process itself
682 Check error from hook during the unbundling process itself
683
683
684 $ cat << EOF >> $HGRCPATH
684 $ cat << EOF >> $HGRCPATH
685 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
685 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
686 > EOF
686 > EOF
687 $ killdaemons.py # reload http config
687 $ killdaemons.py # reload http config
688 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
688 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
689 $ cat other.pid >> $DAEMON_PIDS
689 $ cat other.pid >> $DAEMON_PIDS
690
690
691 $ hg -R main push other -r e7ec4e813ba6
691 $ hg -R main push other -r e7ec4e813ba6
692 pushing to other
692 pushing to other
693 searching for changes
693 searching for changes
694 remote: adding changesets
694 remote: adding changesets
695 remote: adding manifests
695 remote: adding manifests
696 remote: adding file changes
696 remote: adding file changes
697 remote: Fail early!
697 remote: Fail early!
698 remote: transaction abort!
698 remote: transaction abort!
699 remote: Cleaning up the mess...
699 remote: Cleaning up the mess...
700 remote: rollback completed
700 remote: rollback completed
701 abort: pretxnchangegroup hook exited with status 1
701 abort: pretxnchangegroup hook exited with status 1
702 [40]
702 [40]
703 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
703 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
704 pushing to ssh://user@dummy/other
704 pushing to ssh://user@dummy/other
705 searching for changes
705 searching for changes
706 remote: adding changesets
706 remote: adding changesets
707 remote: adding manifests
707 remote: adding manifests
708 remote: adding file changes
708 remote: adding file changes
709 remote: Fail early!
709 remote: Fail early!
710 remote: transaction abort!
710 remote: transaction abort!
711 remote: Cleaning up the mess...
711 remote: Cleaning up the mess...
712 remote: rollback completed
712 remote: rollback completed
713 remote: pretxnchangegroup hook exited with status 1
713 remote: pretxnchangegroup hook exited with status 1
714 abort: push failed on remote
714 abort: push failed on remote
715 [255]
715 [100]
716 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
716 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
717 pushing to http://localhost:$HGPORT2/
717 pushing to http://localhost:$HGPORT2/
718 searching for changes
718 searching for changes
719 remote: adding changesets
719 remote: adding changesets
720 remote: adding manifests
720 remote: adding manifests
721 remote: adding file changes
721 remote: adding file changes
722 remote: Fail early!
722 remote: Fail early!
723 remote: transaction abort!
723 remote: transaction abort!
724 remote: Cleaning up the mess...
724 remote: Cleaning up the mess...
725 remote: rollback completed
725 remote: rollback completed
726 remote: pretxnchangegroup hook exited with status 1
726 remote: pretxnchangegroup hook exited with status 1
727 abort: push failed on remote
727 abort: push failed on remote
728 [255]
728 [100]
729
729
730 Check output capture control.
730 Check output capture control.
731
731
732 (should still be forced for http, disabled for local and ssh)
732 (should still be forced for http, disabled for local and ssh)
733
733
734 $ cat >> $HGRCPATH << EOF
734 $ cat >> $HGRCPATH << EOF
735 > [experimental]
735 > [experimental]
736 > bundle2-output-capture=False
736 > bundle2-output-capture=False
737 > EOF
737 > EOF
738
738
739 $ hg -R main push other -r e7ec4e813ba6
739 $ hg -R main push other -r e7ec4e813ba6
740 pushing to other
740 pushing to other
741 searching for changes
741 searching for changes
742 adding changesets
742 adding changesets
743 adding manifests
743 adding manifests
744 adding file changes
744 adding file changes
745 Fail early!
745 Fail early!
746 transaction abort!
746 transaction abort!
747 Cleaning up the mess...
747 Cleaning up the mess...
748 rollback completed
748 rollback completed
749 abort: pretxnchangegroup hook exited with status 1
749 abort: pretxnchangegroup hook exited with status 1
750 [40]
750 [40]
751 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
751 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
752 pushing to ssh://user@dummy/other
752 pushing to ssh://user@dummy/other
753 searching for changes
753 searching for changes
754 remote: adding changesets
754 remote: adding changesets
755 remote: adding manifests
755 remote: adding manifests
756 remote: adding file changes
756 remote: adding file changes
757 remote: Fail early!
757 remote: Fail early!
758 remote: transaction abort!
758 remote: transaction abort!
759 remote: Cleaning up the mess...
759 remote: Cleaning up the mess...
760 remote: rollback completed
760 remote: rollback completed
761 remote: pretxnchangegroup hook exited with status 1
761 remote: pretxnchangegroup hook exited with status 1
762 abort: push failed on remote
762 abort: push failed on remote
763 [255]
763 [100]
764 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
764 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
765 pushing to http://localhost:$HGPORT2/
765 pushing to http://localhost:$HGPORT2/
766 searching for changes
766 searching for changes
767 remote: adding changesets
767 remote: adding changesets
768 remote: adding manifests
768 remote: adding manifests
769 remote: adding file changes
769 remote: adding file changes
770 remote: Fail early!
770 remote: Fail early!
771 remote: transaction abort!
771 remote: transaction abort!
772 remote: Cleaning up the mess...
772 remote: Cleaning up the mess...
773 remote: rollback completed
773 remote: rollback completed
774 remote: pretxnchangegroup hook exited with status 1
774 remote: pretxnchangegroup hook exited with status 1
775 abort: push failed on remote
775 abort: push failed on remote
776 [255]
776 [100]
777
777
778 Check abort from mandatory pushkey
778 Check abort from mandatory pushkey
779
779
780 $ cat > mandatorypart.py << EOF
780 $ cat > mandatorypart.py << EOF
781 > from mercurial import exchange
781 > from mercurial import exchange
782 > from mercurial import pushkey
782 > from mercurial import pushkey
783 > from mercurial import node
783 > from mercurial import node
784 > from mercurial import error
784 > from mercurial import error
785 > @exchange.b2partsgenerator(b'failingpushkey')
785 > @exchange.b2partsgenerator(b'failingpushkey')
786 > def addfailingpushkey(pushop, bundler):
786 > def addfailingpushkey(pushop, bundler):
787 > enc = pushkey.encode
787 > enc = pushkey.encode
788 > part = bundler.newpart(b'pushkey')
788 > part = bundler.newpart(b'pushkey')
789 > part.addparam(b'namespace', enc(b'phases'))
789 > part.addparam(b'namespace', enc(b'phases'))
790 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
790 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
791 > part.addparam(b'old', enc(b'0')) # successful update
791 > part.addparam(b'old', enc(b'0')) # successful update
792 > part.addparam(b'new', enc(b'0'))
792 > part.addparam(b'new', enc(b'0'))
793 > def fail(pushop, exc):
793 > def fail(pushop, exc):
794 > raise error.Abort(b'Correct phase push failed (because hooks)')
794 > raise error.Abort(b'Correct phase push failed (because hooks)')
795 > pushop.pkfailcb[part.id] = fail
795 > pushop.pkfailcb[part.id] = fail
796 > EOF
796 > EOF
797 $ cat >> $HGRCPATH << EOF
797 $ cat >> $HGRCPATH << EOF
798 > [hooks]
798 > [hooks]
799 > pretxnchangegroup=
799 > pretxnchangegroup=
800 > pretxnclose.failpush=
800 > pretxnclose.failpush=
801 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
801 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
802 > [extensions]
802 > [extensions]
803 > mandatorypart=$TESTTMP/mandatorypart.py
803 > mandatorypart=$TESTTMP/mandatorypart.py
804 > EOF
804 > EOF
805 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
805 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
806 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
806 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
807 $ cat other.pid >> $DAEMON_PIDS
807 $ cat other.pid >> $DAEMON_PIDS
808
808
809 (Failure from a hook)
809 (Failure from a hook)
810
810
811 $ hg -R main push other -r e7ec4e813ba6
811 $ hg -R main push other -r e7ec4e813ba6
812 pushing to other
812 pushing to other
813 searching for changes
813 searching for changes
814 adding changesets
814 adding changesets
815 adding manifests
815 adding manifests
816 adding file changes
816 adding file changes
817 do not push the key !
817 do not push the key !
818 pushkey-abort: prepushkey.failpush hook exited with status 1
818 pushkey-abort: prepushkey.failpush hook exited with status 1
819 transaction abort!
819 transaction abort!
820 Cleaning up the mess...
820 Cleaning up the mess...
821 rollback completed
821 rollback completed
822 abort: Correct phase push failed (because hooks)
822 abort: Correct phase push failed (because hooks)
823 [255]
823 [255]
824 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
824 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
825 pushing to ssh://user@dummy/other
825 pushing to ssh://user@dummy/other
826 searching for changes
826 searching for changes
827 remote: adding changesets
827 remote: adding changesets
828 remote: adding manifests
828 remote: adding manifests
829 remote: adding file changes
829 remote: adding file changes
830 remote: do not push the key !
830 remote: do not push the key !
831 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
831 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
832 remote: transaction abort!
832 remote: transaction abort!
833 remote: Cleaning up the mess...
833 remote: Cleaning up the mess...
834 remote: rollback completed
834 remote: rollback completed
835 abort: Correct phase push failed (because hooks)
835 abort: Correct phase push failed (because hooks)
836 [255]
836 [255]
837 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
837 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
838 pushing to http://localhost:$HGPORT2/
838 pushing to http://localhost:$HGPORT2/
839 searching for changes
839 searching for changes
840 remote: adding changesets
840 remote: adding changesets
841 remote: adding manifests
841 remote: adding manifests
842 remote: adding file changes
842 remote: adding file changes
843 remote: do not push the key !
843 remote: do not push the key !
844 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
844 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
845 remote: transaction abort!
845 remote: transaction abort!
846 remote: Cleaning up the mess...
846 remote: Cleaning up the mess...
847 remote: rollback completed
847 remote: rollback completed
848 abort: Correct phase push failed (because hooks)
848 abort: Correct phase push failed (because hooks)
849 [255]
849 [255]
850
850
851 (Failure from the pushkey)
851 (Failure from the pushkey)
852
852
853 $ cat > mandatorypart.py << EOF
853 $ cat > mandatorypart.py << EOF
854 > from mercurial import exchange
854 > from mercurial import exchange
855 > from mercurial import pushkey
855 > from mercurial import pushkey
856 > from mercurial import node
856 > from mercurial import node
857 > from mercurial import error
857 > from mercurial import error
858 > @exchange.b2partsgenerator(b'failingpushkey')
858 > @exchange.b2partsgenerator(b'failingpushkey')
859 > def addfailingpushkey(pushop, bundler):
859 > def addfailingpushkey(pushop, bundler):
860 > enc = pushkey.encode
860 > enc = pushkey.encode
861 > part = bundler.newpart(b'pushkey')
861 > part = bundler.newpart(b'pushkey')
862 > part.addparam(b'namespace', enc(b'phases'))
862 > part.addparam(b'namespace', enc(b'phases'))
863 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
863 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
864 > part.addparam(b'old', enc(b'4')) # will fail
864 > part.addparam(b'old', enc(b'4')) # will fail
865 > part.addparam(b'new', enc(b'3'))
865 > part.addparam(b'new', enc(b'3'))
866 > def fail(pushop, exc):
866 > def fail(pushop, exc):
867 > raise error.Abort(b'Clown phase push failed')
867 > raise error.Abort(b'Clown phase push failed')
868 > pushop.pkfailcb[part.id] = fail
868 > pushop.pkfailcb[part.id] = fail
869 > EOF
869 > EOF
870 $ cat >> $HGRCPATH << EOF
870 $ cat >> $HGRCPATH << EOF
871 > [hooks]
871 > [hooks]
872 > prepushkey.failpush =
872 > prepushkey.failpush =
873 > EOF
873 > EOF
874 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
874 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
875 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
875 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
876 $ cat other.pid >> $DAEMON_PIDS
876 $ cat other.pid >> $DAEMON_PIDS
877
877
878 $ hg -R main push other -r e7ec4e813ba6
878 $ hg -R main push other -r e7ec4e813ba6
879 pushing to other
879 pushing to other
880 searching for changes
880 searching for changes
881 adding changesets
881 adding changesets
882 adding manifests
882 adding manifests
883 adding file changes
883 adding file changes
884 transaction abort!
884 transaction abort!
885 Cleaning up the mess...
885 Cleaning up the mess...
886 rollback completed
886 rollback completed
887 pushkey: lock state after "phases"
887 pushkey: lock state after "phases"
888 lock: free
888 lock: free
889 wlock: free
889 wlock: free
890 abort: Clown phase push failed
890 abort: Clown phase push failed
891 [255]
891 [255]
892 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
892 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
893 pushing to ssh://user@dummy/other
893 pushing to ssh://user@dummy/other
894 searching for changes
894 searching for changes
895 remote: adding changesets
895 remote: adding changesets
896 remote: adding manifests
896 remote: adding manifests
897 remote: adding file changes
897 remote: adding file changes
898 remote: transaction abort!
898 remote: transaction abort!
899 remote: Cleaning up the mess...
899 remote: Cleaning up the mess...
900 remote: rollback completed
900 remote: rollback completed
901 remote: pushkey: lock state after "phases"
901 remote: pushkey: lock state after "phases"
902 remote: lock: free
902 remote: lock: free
903 remote: wlock: free
903 remote: wlock: free
904 abort: Clown phase push failed
904 abort: Clown phase push failed
905 [255]
905 [255]
906 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
906 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
907 pushing to http://localhost:$HGPORT2/
907 pushing to http://localhost:$HGPORT2/
908 searching for changes
908 searching for changes
909 remote: adding changesets
909 remote: adding changesets
910 remote: adding manifests
910 remote: adding manifests
911 remote: adding file changes
911 remote: adding file changes
912 remote: transaction abort!
912 remote: transaction abort!
913 remote: Cleaning up the mess...
913 remote: Cleaning up the mess...
914 remote: rollback completed
914 remote: rollback completed
915 remote: pushkey: lock state after "phases"
915 remote: pushkey: lock state after "phases"
916 remote: lock: free
916 remote: lock: free
917 remote: wlock: free
917 remote: wlock: free
918 abort: Clown phase push failed
918 abort: Clown phase push failed
919 [255]
919 [255]
920
920
921 Test lazily acquiring the lock during unbundle
921 Test lazily acquiring the lock during unbundle
922 $ cp $TESTTMP/hgrc.orig $HGRCPATH
922 $ cp $TESTTMP/hgrc.orig $HGRCPATH
923 $ cat >> $HGRCPATH <<EOF
923 $ cat >> $HGRCPATH <<EOF
924 > [ui]
924 > [ui]
925 > ssh="$PYTHON" "$TESTDIR/dummyssh"
925 > ssh="$PYTHON" "$TESTDIR/dummyssh"
926 > EOF
926 > EOF
927
927
928 $ cat >> $TESTTMP/locktester.py <<EOF
928 $ cat >> $TESTTMP/locktester.py <<EOF
929 > import os
929 > import os
930 > from mercurial import bundle2, error, extensions
930 > from mercurial import bundle2, error, extensions
931 > def checklock(orig, repo, *args, **kwargs):
931 > def checklock(orig, repo, *args, **kwargs):
932 > if repo.svfs.lexists(b"lock"):
932 > if repo.svfs.lexists(b"lock"):
933 > raise error.Abort(b"Lock should not be taken")
933 > raise error.Abort(b"Lock should not be taken")
934 > return orig(repo, *args, **kwargs)
934 > return orig(repo, *args, **kwargs)
935 > def extsetup(ui):
935 > def extsetup(ui):
936 > extensions.wrapfunction(bundle2, b'processbundle', checklock)
936 > extensions.wrapfunction(bundle2, b'processbundle', checklock)
937 > EOF
937 > EOF
938
938
939 $ hg init lazylock
939 $ hg init lazylock
940 $ cat >> lazylock/.hg/hgrc <<EOF
940 $ cat >> lazylock/.hg/hgrc <<EOF
941 > [extensions]
941 > [extensions]
942 > locktester=$TESTTMP/locktester.py
942 > locktester=$TESTTMP/locktester.py
943 > EOF
943 > EOF
944
944
945 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
945 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
946 $ cd lazylockclient
946 $ cd lazylockclient
947 $ touch a && hg ci -Aqm a
947 $ touch a && hg ci -Aqm a
948 $ hg push
948 $ hg push
949 pushing to ssh://user@dummy/lazylock
949 pushing to ssh://user@dummy/lazylock
950 searching for changes
950 searching for changes
951 remote: Lock should not be taken
951 remote: Lock should not be taken
952 abort: push failed on remote
952 abort: push failed on remote
953 [255]
953 [100]
954
954
955 $ cat >> ../lazylock/.hg/hgrc <<EOF
955 $ cat >> ../lazylock/.hg/hgrc <<EOF
956 > [experimental]
956 > [experimental]
957 > bundle2lazylocking=True
957 > bundle2lazylocking=True
958 > EOF
958 > EOF
959 $ hg push
959 $ hg push
960 pushing to ssh://user@dummy/lazylock
960 pushing to ssh://user@dummy/lazylock
961 searching for changes
961 searching for changes
962 remote: adding changesets
962 remote: adding changesets
963 remote: adding manifests
963 remote: adding manifests
964 remote: adding file changes
964 remote: adding file changes
965 remote: added 1 changesets with 1 changes to 1 files
965 remote: added 1 changesets with 1 changes to 1 files
966
966
967 $ cd ..
967 $ cd ..
968
968
969 Servers can disable bundle1 for clone/pull operations
969 Servers can disable bundle1 for clone/pull operations
970
970
971 $ killdaemons.py
971 $ killdaemons.py
972 $ hg init bundle2onlyserver
972 $ hg init bundle2onlyserver
973 $ cd bundle2onlyserver
973 $ cd bundle2onlyserver
974 $ cat > .hg/hgrc << EOF
974 $ cat > .hg/hgrc << EOF
975 > [server]
975 > [server]
976 > bundle1.pull = false
976 > bundle1.pull = false
977 > EOF
977 > EOF
978
978
979 $ touch foo
979 $ touch foo
980 $ hg -q commit -A -m initial
980 $ hg -q commit -A -m initial
981
981
982 $ hg serve -p $HGPORT -d --pid-file=hg.pid
982 $ hg serve -p $HGPORT -d --pid-file=hg.pid
983 $ cat hg.pid >> $DAEMON_PIDS
983 $ cat hg.pid >> $DAEMON_PIDS
984
984
985 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
985 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
986 requesting all changes
986 requesting all changes
987 abort: remote error:
987 abort: remote error:
988 incompatible Mercurial client; bundle2 required
988 incompatible Mercurial client; bundle2 required
989 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
989 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
990 [100]
990 [100]
991 $ killdaemons.py
991 $ killdaemons.py
992 $ cd ..
992 $ cd ..
993
993
994 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled
994 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled
995
995
996 $ hg --config format.usegeneraldelta=false init notgdserver
996 $ hg --config format.usegeneraldelta=false init notgdserver
997 $ cd notgdserver
997 $ cd notgdserver
998 $ cat > .hg/hgrc << EOF
998 $ cat > .hg/hgrc << EOF
999 > [server]
999 > [server]
1000 > bundle1gd.pull = false
1000 > bundle1gd.pull = false
1001 > EOF
1001 > EOF
1002
1002
1003 $ touch foo
1003 $ touch foo
1004 $ hg -q commit -A -m initial
1004 $ hg -q commit -A -m initial
1005 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1005 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1006 $ cat hg.pid >> $DAEMON_PIDS
1006 $ cat hg.pid >> $DAEMON_PIDS
1007
1007
1008 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-1
1008 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-1
1009 requesting all changes
1009 requesting all changes
1010 adding changesets
1010 adding changesets
1011 adding manifests
1011 adding manifests
1012 adding file changes
1012 adding file changes
1013 added 1 changesets with 1 changes to 1 files
1013 added 1 changesets with 1 changes to 1 files
1014 new changesets 96ee1d7354c4
1014 new changesets 96ee1d7354c4
1015 updating to branch default
1015 updating to branch default
1016 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1016 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1017
1017
1018 $ killdaemons.py
1018 $ killdaemons.py
1019 $ cd ../bundle2onlyserver
1019 $ cd ../bundle2onlyserver
1020
1020
1021 bundle1 pull can be disabled for generaldelta repos only
1021 bundle1 pull can be disabled for generaldelta repos only
1022
1022
1023 $ cat > .hg/hgrc << EOF
1023 $ cat > .hg/hgrc << EOF
1024 > [server]
1024 > [server]
1025 > bundle1gd.pull = false
1025 > bundle1gd.pull = false
1026 > EOF
1026 > EOF
1027
1027
1028 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1028 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1029 $ cat hg.pid >> $DAEMON_PIDS
1029 $ cat hg.pid >> $DAEMON_PIDS
1030 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1030 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1031 requesting all changes
1031 requesting all changes
1032 abort: remote error:
1032 abort: remote error:
1033 incompatible Mercurial client; bundle2 required
1033 incompatible Mercurial client; bundle2 required
1034 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1034 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1035 [100]
1035 [100]
1036
1036
1037 $ killdaemons.py
1037 $ killdaemons.py
1038
1038
1039 Verify the global server.bundle1 option works
1039 Verify the global server.bundle1 option works
1040
1040
1041 $ cd ..
1041 $ cd ..
1042 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1042 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1043 > [server]
1043 > [server]
1044 > bundle1 = false
1044 > bundle1 = false
1045 > EOF
1045 > EOF
1046 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1046 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1047 $ cat hg.pid >> $DAEMON_PIDS
1047 $ cat hg.pid >> $DAEMON_PIDS
1048 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT not-bundle2
1048 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT not-bundle2
1049 requesting all changes
1049 requesting all changes
1050 abort: remote error:
1050 abort: remote error:
1051 incompatible Mercurial client; bundle2 required
1051 incompatible Mercurial client; bundle2 required
1052 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1052 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1053 [100]
1053 [100]
1054 $ killdaemons.py
1054 $ killdaemons.py
1055
1055
1056 $ hg --config devel.legacy.exchange=bundle1 clone ssh://user@dummy/bundle2onlyserver not-bundle2-ssh
1056 $ hg --config devel.legacy.exchange=bundle1 clone ssh://user@dummy/bundle2onlyserver not-bundle2-ssh
1057 requesting all changes
1057 requesting all changes
1058 adding changesets
1058 adding changesets
1059 remote: abort: incompatible Mercurial client; bundle2 required
1059 remote: abort: incompatible Mercurial client; bundle2 required
1060 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1060 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1061 transaction abort!
1061 transaction abort!
1062 rollback completed
1062 rollback completed
1063 abort: stream ended unexpectedly (got 0 bytes, expected 4)
1063 abort: stream ended unexpectedly (got 0 bytes, expected 4)
1064 [255]
1064 [255]
1065
1065
1066 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1066 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1067 > [server]
1067 > [server]
1068 > bundle1gd = false
1068 > bundle1gd = false
1069 > EOF
1069 > EOF
1070 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1070 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1071 $ cat hg.pid >> $DAEMON_PIDS
1071 $ cat hg.pid >> $DAEMON_PIDS
1072
1072
1073 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1073 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1074 requesting all changes
1074 requesting all changes
1075 abort: remote error:
1075 abort: remote error:
1076 incompatible Mercurial client; bundle2 required
1076 incompatible Mercurial client; bundle2 required
1077 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1077 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1078 [100]
1078 [100]
1079
1079
1080 $ killdaemons.py
1080 $ killdaemons.py
1081
1081
1082 $ cd notgdserver
1082 $ cd notgdserver
1083 $ cat > .hg/hgrc << EOF
1083 $ cat > .hg/hgrc << EOF
1084 > [server]
1084 > [server]
1085 > bundle1gd = false
1085 > bundle1gd = false
1086 > EOF
1086 > EOF
1087 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1087 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1088 $ cat hg.pid >> $DAEMON_PIDS
1088 $ cat hg.pid >> $DAEMON_PIDS
1089
1089
1090 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-2
1090 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-2
1091 requesting all changes
1091 requesting all changes
1092 adding changesets
1092 adding changesets
1093 adding manifests
1093 adding manifests
1094 adding file changes
1094 adding file changes
1095 added 1 changesets with 1 changes to 1 files
1095 added 1 changesets with 1 changes to 1 files
1096 new changesets 96ee1d7354c4
1096 new changesets 96ee1d7354c4
1097 updating to branch default
1097 updating to branch default
1098 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1098 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1099
1099
1100 $ killdaemons.py
1100 $ killdaemons.py
1101 $ cd ../bundle2onlyserver
1101 $ cd ../bundle2onlyserver
1102
1102
1103 Verify bundle1 pushes can be disabled
1103 Verify bundle1 pushes can be disabled
1104
1104
1105 $ cat > .hg/hgrc << EOF
1105 $ cat > .hg/hgrc << EOF
1106 > [server]
1106 > [server]
1107 > bundle1.push = false
1107 > bundle1.push = false
1108 > [web]
1108 > [web]
1109 > allow_push = *
1109 > allow_push = *
1110 > push_ssl = false
1110 > push_ssl = false
1111 > EOF
1111 > EOF
1112
1112
1113 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1113 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1114 $ cat hg.pid >> $DAEMON_PIDS
1114 $ cat hg.pid >> $DAEMON_PIDS
1115 $ cd ..
1115 $ cd ..
1116
1116
1117 $ hg clone http://localhost:$HGPORT bundle2-only
1117 $ hg clone http://localhost:$HGPORT bundle2-only
1118 requesting all changes
1118 requesting all changes
1119 adding changesets
1119 adding changesets
1120 adding manifests
1120 adding manifests
1121 adding file changes
1121 adding file changes
1122 added 1 changesets with 1 changes to 1 files
1122 added 1 changesets with 1 changes to 1 files
1123 new changesets 96ee1d7354c4
1123 new changesets 96ee1d7354c4
1124 updating to branch default
1124 updating to branch default
1125 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1125 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1126 $ cd bundle2-only
1126 $ cd bundle2-only
1127 $ echo commit > foo
1127 $ echo commit > foo
1128 $ hg commit -m commit
1128 $ hg commit -m commit
1129 $ hg --config devel.legacy.exchange=bundle1 push
1129 $ hg --config devel.legacy.exchange=bundle1 push
1130 pushing to http://localhost:$HGPORT/
1130 pushing to http://localhost:$HGPORT/
1131 searching for changes
1131 searching for changes
1132 abort: remote error:
1132 abort: remote error:
1133 incompatible Mercurial client; bundle2 required
1133 incompatible Mercurial client; bundle2 required
1134 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1134 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1135 [100]
1135 [100]
1136
1136
1137 (also check with ssh)
1137 (also check with ssh)
1138
1138
1139 $ hg --config devel.legacy.exchange=bundle1 push ssh://user@dummy/bundle2onlyserver
1139 $ hg --config devel.legacy.exchange=bundle1 push ssh://user@dummy/bundle2onlyserver
1140 pushing to ssh://user@dummy/bundle2onlyserver
1140 pushing to ssh://user@dummy/bundle2onlyserver
1141 searching for changes
1141 searching for changes
1142 remote: abort: incompatible Mercurial client; bundle2 required
1142 remote: abort: incompatible Mercurial client; bundle2 required
1143 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1143 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1144 [1]
1144 [1]
1145
1145
1146 $ hg push
1146 $ hg push
1147 pushing to http://localhost:$HGPORT/
1147 pushing to http://localhost:$HGPORT/
1148 searching for changes
1148 searching for changes
1149 remote: adding changesets
1149 remote: adding changesets
1150 remote: adding manifests
1150 remote: adding manifests
1151 remote: adding file changes
1151 remote: adding file changes
1152 remote: added 1 changesets with 1 changes to 1 files
1152 remote: added 1 changesets with 1 changes to 1 files
@@ -1,610 +1,610 b''
1 #testcases sshv1 sshv2
1 #testcases sshv1 sshv2
2
2
3 #if sshv2
3 #if sshv2
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > sshpeer.advertise-v2 = true
6 > sshpeer.advertise-v2 = true
7 > sshserver.support-v2 = true
7 > sshserver.support-v2 = true
8 > EOF
8 > EOF
9 #endif
9 #endif
10
10
11 Create an extension to test bundle2 remote-changegroup parts
11 Create an extension to test bundle2 remote-changegroup parts
12
12
13 $ cat > bundle2.py << EOF
13 $ cat > bundle2.py << EOF
14 > """A small extension to test bundle2 remote-changegroup parts.
14 > """A small extension to test bundle2 remote-changegroup parts.
15 >
15 >
16 > Current bundle2 implementation doesn't provide a way to generate those
16 > Current bundle2 implementation doesn't provide a way to generate those
17 > parts, so they must be created by extensions.
17 > parts, so they must be created by extensions.
18 > """
18 > """
19 > from mercurial import (
19 > from mercurial import (
20 > bundle2,
20 > bundle2,
21 > changegroup,
21 > changegroup,
22 > discovery,
22 > discovery,
23 > exchange,
23 > exchange,
24 > pycompat,
24 > pycompat,
25 > util,
25 > util,
26 > )
26 > )
27 >
27 >
28 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
28 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
29 > b2caps=None, heads=None, common=None,
29 > b2caps=None, heads=None, common=None,
30 > **kwargs):
30 > **kwargs):
31 > """this function replaces the changegroup part handler for getbundle.
31 > """this function replaces the changegroup part handler for getbundle.
32 > It allows creating a set of arbitrary parts containing changegroups
32 > It allows creating a set of arbitrary parts containing changegroups
33 > and remote-changegroups, as described in a bundle2maker file in the
33 > and remote-changegroups, as described in a bundle2maker file in the
34 > repository .hg/ directory.
34 > repository .hg/ directory.
35 >
35 >
36 > Each line of that bundle2maker file contains a description of the
36 > Each line of that bundle2maker file contains a description of the
37 > part to add:
37 > part to add:
38 > - changegroup common_revset heads_revset
38 > - changegroup common_revset heads_revset
39 > Creates a changegroup part, using common_revset and
39 > Creates a changegroup part, using common_revset and
40 > heads_revset to compute the outgoing changesets
40 > heads_revset to compute the outgoing changesets
41 > - remote-changegroup url file
41 > - remote-changegroup url file
42 > Creates a remote-changegroup part for a bundle at the given
42 > Creates a remote-changegroup part for a bundle at the given
43 > url. Size and digest, as required by the client, are computed
43 > url. Size and digest, as required by the client, are computed
44 > from the given file.
44 > from the given file.
45 > - raw-remote-changegroup <python expression>
45 > - raw-remote-changegroup <python expression>
46 > Creates a remote-changegroup part with the data given in the
46 > Creates a remote-changegroup part with the data given in the
47 > Python expression as parameters. The Python expression is
47 > Python expression as parameters. The Python expression is
48 > evaluated with eval, and is expected to be a dict.
48 > evaluated with eval, and is expected to be a dict.
49 > """
49 > """
50 > def newpart(name, data=b''):
50 > def newpart(name, data=b''):
51 > """wrapper around bundler.newpart adding an extra part making the
51 > """wrapper around bundler.newpart adding an extra part making the
52 > client output information about each processed part"""
52 > client output information about each processed part"""
53 > bundler.newpart(b'output', data=name)
53 > bundler.newpart(b'output', data=name)
54 > part = bundler.newpart(name, data=data)
54 > part = bundler.newpart(name, data=data)
55 > return part
55 > return part
56 >
56 >
57 > for line in open(repo.vfs.join(b'bundle2maker'), 'rb'):
57 > for line in open(repo.vfs.join(b'bundle2maker'), 'rb'):
58 > line = line.strip()
58 > line = line.strip()
59 > try:
59 > try:
60 > verb, args = line.split(None, 1)
60 > verb, args = line.split(None, 1)
61 > except ValueError:
61 > except ValueError:
62 > verb, args = line, b''
62 > verb, args = line, b''
63 > if verb == b'remote-changegroup':
63 > if verb == b'remote-changegroup':
64 > url, file = args.split()
64 > url, file = args.split()
65 > bundledata = open(file, 'rb').read()
65 > bundledata = open(file, 'rb').read()
66 > digest = util.digester.preferred(b2caps[b'digests'])
66 > digest = util.digester.preferred(b2caps[b'digests'])
67 > d = util.digester([digest], bundledata)
67 > d = util.digester([digest], bundledata)
68 > part = newpart(b'remote-changegroup')
68 > part = newpart(b'remote-changegroup')
69 > part.addparam(b'url', url)
69 > part.addparam(b'url', url)
70 > part.addparam(b'size', b'%d' % len(bundledata))
70 > part.addparam(b'size', b'%d' % len(bundledata))
71 > part.addparam(b'digests', digest)
71 > part.addparam(b'digests', digest)
72 > part.addparam(b'digest:%s' % digest, d[digest])
72 > part.addparam(b'digest:%s' % digest, d[digest])
73 > elif verb == b'raw-remote-changegroup':
73 > elif verb == b'raw-remote-changegroup':
74 > part = newpart(b'remote-changegroup')
74 > part = newpart(b'remote-changegroup')
75 > for k, v in eval(args).items():
75 > for k, v in eval(args).items():
76 > part.addparam(pycompat.sysbytes(k), pycompat.bytestr(v))
76 > part.addparam(pycompat.sysbytes(k), pycompat.bytestr(v))
77 > elif verb == b'changegroup':
77 > elif verb == b'changegroup':
78 > _common, heads = args.split()
78 > _common, heads = args.split()
79 > common.extend(repo[r].node() for r in repo.revs(_common))
79 > common.extend(repo[r].node() for r in repo.revs(_common))
80 > heads = [repo[r].node() for r in repo.revs(heads)]
80 > heads = [repo[r].node() for r in repo.revs(heads)]
81 > outgoing = discovery.outgoing(repo, common, heads)
81 > outgoing = discovery.outgoing(repo, common, heads)
82 > cg = changegroup.makechangegroup(repo, outgoing, b'01',
82 > cg = changegroup.makechangegroup(repo, outgoing, b'01',
83 > b'changegroup')
83 > b'changegroup')
84 > newpart(b'changegroup', cg.getchunks())
84 > newpart(b'changegroup', cg.getchunks())
85 > else:
85 > else:
86 > raise Exception('unknown verb')
86 > raise Exception('unknown verb')
87 >
87 >
88 > exchange.getbundle2partsmapping[b'changegroup'] = _getbundlechangegrouppart
88 > exchange.getbundle2partsmapping[b'changegroup'] = _getbundlechangegrouppart
89 > EOF
89 > EOF
90
90
91 Start a simple HTTP server to serve bundles
91 Start a simple HTTP server to serve bundles
92
92
93 $ "$PYTHON" "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
93 $ "$PYTHON" "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
94 $ cat dumb.pid >> $DAEMON_PIDS
94 $ cat dumb.pid >> $DAEMON_PIDS
95
95
96 $ cat >> $HGRCPATH << EOF
96 $ cat >> $HGRCPATH << EOF
97 > [ui]
97 > [ui]
98 > ssh="$PYTHON" "$TESTDIR/dummyssh"
98 > ssh="$PYTHON" "$TESTDIR/dummyssh"
99 > [command-templates]
99 > [command-templates]
100 > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
100 > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
101 > EOF
101 > EOF
102
102
103 $ hg init repo
103 $ hg init repo
104
104
105 $ hg -R repo unbundle $TESTDIR/bundles/rebase.hg
105 $ hg -R repo unbundle $TESTDIR/bundles/rebase.hg
106 adding changesets
106 adding changesets
107 adding manifests
107 adding manifests
108 adding file changes
108 adding file changes
109 added 8 changesets with 7 changes to 7 files (+2 heads)
109 added 8 changesets with 7 changes to 7 files (+2 heads)
110 new changesets cd010b8cd998:02de42196ebe (8 drafts)
110 new changesets cd010b8cd998:02de42196ebe (8 drafts)
111 (run 'hg heads' to see heads, 'hg merge' to merge)
111 (run 'hg heads' to see heads, 'hg merge' to merge)
112
112
113 $ hg -R repo log -G
113 $ hg -R repo log -G
114 o 7:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
114 o 7:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
115 |
115 |
116 | o 6:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
116 | o 6:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
117 |/|
117 |/|
118 o | 5:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
118 o | 5:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
119 | |
119 | |
120 | o 4:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
120 | o 4:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
121 |/
121 |/
122 | o 3:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
122 | o 3:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
123 | |
123 | |
124 | o 2:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
124 | o 2:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
125 | |
125 | |
126 | o 1:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
126 | o 1:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
127 |/
127 |/
128 o 0:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
128 o 0:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
129
129
130 $ hg clone repo orig
130 $ hg clone repo orig
131 updating to branch default
131 updating to branch default
132 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
132 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
133
133
134 $ cat > repo/.hg/hgrc << EOF
134 $ cat > repo/.hg/hgrc << EOF
135 > [extensions]
135 > [extensions]
136 > bundle2=$TESTTMP/bundle2.py
136 > bundle2=$TESTTMP/bundle2.py
137 > EOF
137 > EOF
138
138
139 Test a pull with a remote-changegroup
139 Test a pull with a remote-changegroup
140
140
141 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle.hg
141 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle.hg
142 3 changesets found
142 3 changesets found
143 $ cat > repo/.hg/bundle2maker << EOF
143 $ cat > repo/.hg/bundle2maker << EOF
144 > remote-changegroup http://localhost:$HGPORT/bundle.hg bundle.hg
144 > remote-changegroup http://localhost:$HGPORT/bundle.hg bundle.hg
145 > EOF
145 > EOF
146 $ hg clone orig clone -r 3 -r 4
146 $ hg clone orig clone -r 3 -r 4
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 5 changesets with 5 changes to 5 files (+1 heads)
150 added 5 changesets with 5 changes to 5 files (+1 heads)
151 new changesets cd010b8cd998:9520eea781bc
151 new changesets cd010b8cd998:9520eea781bc
152 updating to branch default
152 updating to branch default
153 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
153 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
154 $ hg pull -R clone ssh://user@dummy/repo
154 $ hg pull -R clone ssh://user@dummy/repo
155 pulling from ssh://user@dummy/repo
155 pulling from ssh://user@dummy/repo
156 searching for changes
156 searching for changes
157 remote: remote-changegroup
157 remote: remote-changegroup
158 adding changesets
158 adding changesets
159 adding manifests
159 adding manifests
160 adding file changes
160 adding file changes
161 added 3 changesets with 2 changes to 2 files (+1 heads)
161 added 3 changesets with 2 changes to 2 files (+1 heads)
162 new changesets 24b6387c8c8c:02de42196ebe
162 new changesets 24b6387c8c8c:02de42196ebe
163 (run 'hg heads .' to see heads, 'hg merge' to merge)
163 (run 'hg heads .' to see heads, 'hg merge' to merge)
164 $ hg -R clone log -G
164 $ hg -R clone log -G
165 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
165 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
166 |
166 |
167 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
167 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
168 |/|
168 |/|
169 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
169 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
170 | |
170 | |
171 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
171 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
172 |/
172 |/
173 | @ 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
173 | @ 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
174 | |
174 | |
175 | o 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
175 | o 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
176 | |
176 | |
177 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
177 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
178 |/
178 |/
179 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
179 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
180
180
181 $ rm -rf clone
181 $ rm -rf clone
182
182
183 Test a pull with a remote-changegroup and a following changegroup
183 Test a pull with a remote-changegroup and a following changegroup
184
184
185 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle2.hg
185 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle2.hg
186 2 changesets found
186 2 changesets found
187 $ cat > repo/.hg/bundle2maker << EOF
187 $ cat > repo/.hg/bundle2maker << EOF
188 > remote-changegroup http://localhost:$HGPORT/bundle2.hg bundle2.hg
188 > remote-changegroup http://localhost:$HGPORT/bundle2.hg bundle2.hg
189 > changegroup 0:4 5:7
189 > changegroup 0:4 5:7
190 > EOF
190 > EOF
191 $ hg clone orig clone -r 2
191 $ hg clone orig clone -r 2
192 adding changesets
192 adding changesets
193 adding manifests
193 adding manifests
194 adding file changes
194 adding file changes
195 added 3 changesets with 3 changes to 3 files
195 added 3 changesets with 3 changes to 3 files
196 new changesets cd010b8cd998:5fddd98957c8
196 new changesets cd010b8cd998:5fddd98957c8
197 updating to branch default
197 updating to branch default
198 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
198 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
199 $ hg pull -R clone ssh://user@dummy/repo
199 $ hg pull -R clone ssh://user@dummy/repo
200 pulling from ssh://user@dummy/repo
200 pulling from ssh://user@dummy/repo
201 searching for changes
201 searching for changes
202 remote: remote-changegroup
202 remote: remote-changegroup
203 adding changesets
203 adding changesets
204 adding manifests
204 adding manifests
205 adding file changes
205 adding file changes
206 remote: changegroup
206 remote: changegroup
207 adding changesets
207 adding changesets
208 adding manifests
208 adding manifests
209 adding file changes
209 adding file changes
210 added 5 changesets with 4 changes to 4 files (+2 heads)
210 added 5 changesets with 4 changes to 4 files (+2 heads)
211 new changesets 32af7686d403:02de42196ebe
211 new changesets 32af7686d403:02de42196ebe
212 (run 'hg heads' to see heads, 'hg merge' to merge)
212 (run 'hg heads' to see heads, 'hg merge' to merge)
213 $ hg -R clone log -G
213 $ hg -R clone log -G
214 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
214 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
215 |
215 |
216 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
216 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
217 |/|
217 |/|
218 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
218 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
219 | |
219 | |
220 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
220 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
221 |/
221 |/
222 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
222 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
223 | |
223 | |
224 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
224 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
225 | |
225 | |
226 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
226 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
227 |/
227 |/
228 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
228 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
229
229
230 $ rm -rf clone
230 $ rm -rf clone
231
231
232 Test a pull with a changegroup followed by a remote-changegroup
232 Test a pull with a changegroup followed by a remote-changegroup
233
233
234 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle3.hg
234 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle3.hg
235 3 changesets found
235 3 changesets found
236 $ cat > repo/.hg/bundle2maker << EOF
236 $ cat > repo/.hg/bundle2maker << EOF
237 > changegroup 000000000000 :4
237 > changegroup 000000000000 :4
238 > remote-changegroup http://localhost:$HGPORT/bundle3.hg bundle3.hg
238 > remote-changegroup http://localhost:$HGPORT/bundle3.hg bundle3.hg
239 > EOF
239 > EOF
240 $ hg clone orig clone -r 2
240 $ hg clone orig clone -r 2
241 adding changesets
241 adding changesets
242 adding manifests
242 adding manifests
243 adding file changes
243 adding file changes
244 added 3 changesets with 3 changes to 3 files
244 added 3 changesets with 3 changes to 3 files
245 new changesets cd010b8cd998:5fddd98957c8
245 new changesets cd010b8cd998:5fddd98957c8
246 updating to branch default
246 updating to branch default
247 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
247 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
248 $ hg pull -R clone ssh://user@dummy/repo
248 $ hg pull -R clone ssh://user@dummy/repo
249 pulling from ssh://user@dummy/repo
249 pulling from ssh://user@dummy/repo
250 searching for changes
250 searching for changes
251 remote: changegroup
251 remote: changegroup
252 adding changesets
252 adding changesets
253 adding manifests
253 adding manifests
254 adding file changes
254 adding file changes
255 remote: remote-changegroup
255 remote: remote-changegroup
256 adding changesets
256 adding changesets
257 adding manifests
257 adding manifests
258 adding file changes
258 adding file changes
259 added 5 changesets with 4 changes to 4 files (+2 heads)
259 added 5 changesets with 4 changes to 4 files (+2 heads)
260 new changesets 32af7686d403:02de42196ebe
260 new changesets 32af7686d403:02de42196ebe
261 (run 'hg heads' to see heads, 'hg merge' to merge)
261 (run 'hg heads' to see heads, 'hg merge' to merge)
262 $ hg -R clone log -G
262 $ hg -R clone log -G
263 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
263 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
264 |
264 |
265 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
265 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
266 |/|
266 |/|
267 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
267 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
268 | |
268 | |
269 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
269 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
270 |/
270 |/
271 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
271 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
272 | |
272 | |
273 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
273 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
274 | |
274 | |
275 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
275 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
276 |/
276 |/
277 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
277 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
278
278
279 $ rm -rf clone
279 $ rm -rf clone
280
280
281 Test a pull with two remote-changegroups and a changegroup
281 Test a pull with two remote-changegroups and a changegroup
282
282
283 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle4.hg
283 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle4.hg
284 2 changesets found
284 2 changesets found
285 $ hg bundle -R repo --type v1 --base '3:4' -r '5:6' bundle5.hg
285 $ hg bundle -R repo --type v1 --base '3:4' -r '5:6' bundle5.hg
286 2 changesets found
286 2 changesets found
287 $ cat > repo/.hg/bundle2maker << EOF
287 $ cat > repo/.hg/bundle2maker << EOF
288 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
288 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
289 > remote-changegroup http://localhost:$HGPORT/bundle5.hg bundle5.hg
289 > remote-changegroup http://localhost:$HGPORT/bundle5.hg bundle5.hg
290 > changegroup 0:6 7
290 > changegroup 0:6 7
291 > EOF
291 > EOF
292 $ hg clone orig clone -r 2
292 $ hg clone orig clone -r 2
293 adding changesets
293 adding changesets
294 adding manifests
294 adding manifests
295 adding file changes
295 adding file changes
296 added 3 changesets with 3 changes to 3 files
296 added 3 changesets with 3 changes to 3 files
297 new changesets cd010b8cd998:5fddd98957c8
297 new changesets cd010b8cd998:5fddd98957c8
298 updating to branch default
298 updating to branch default
299 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
299 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
300 $ hg pull -R clone ssh://user@dummy/repo
300 $ hg pull -R clone ssh://user@dummy/repo
301 pulling from ssh://user@dummy/repo
301 pulling from ssh://user@dummy/repo
302 searching for changes
302 searching for changes
303 remote: remote-changegroup
303 remote: remote-changegroup
304 adding changesets
304 adding changesets
305 adding manifests
305 adding manifests
306 adding file changes
306 adding file changes
307 remote: remote-changegroup
307 remote: remote-changegroup
308 adding changesets
308 adding changesets
309 adding manifests
309 adding manifests
310 adding file changes
310 adding file changes
311 remote: changegroup
311 remote: changegroup
312 adding changesets
312 adding changesets
313 adding manifests
313 adding manifests
314 adding file changes
314 adding file changes
315 added 5 changesets with 4 changes to 4 files (+2 heads)
315 added 5 changesets with 4 changes to 4 files (+2 heads)
316 new changesets 32af7686d403:02de42196ebe
316 new changesets 32af7686d403:02de42196ebe
317 (run 'hg heads' to see heads, 'hg merge' to merge)
317 (run 'hg heads' to see heads, 'hg merge' to merge)
318 $ hg -R clone log -G
318 $ hg -R clone log -G
319 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
319 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
320 |
320 |
321 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
321 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
322 |/|
322 |/|
323 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
323 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
324 | |
324 | |
325 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
325 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
326 |/
326 |/
327 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
327 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
328 | |
328 | |
329 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
329 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
330 | |
330 | |
331 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
331 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
332 |/
332 |/
333 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
333 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
334
334
335 $ rm -rf clone
335 $ rm -rf clone
336
336
337 Hash digest tests
337 Hash digest tests
338
338
339 $ hg bundle -R repo --type v1 -a bundle6.hg
339 $ hg bundle -R repo --type v1 -a bundle6.hg
340 8 changesets found
340 8 changesets found
341
341
342 $ cat > repo/.hg/bundle2maker << EOF
342 $ cat > repo/.hg/bundle2maker << EOF
343 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
343 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
344 > EOF
344 > EOF
345 $ hg clone ssh://user@dummy/repo clone
345 $ hg clone ssh://user@dummy/repo clone
346 requesting all changes
346 requesting all changes
347 remote: remote-changegroup
347 remote: remote-changegroup
348 adding changesets
348 adding changesets
349 adding manifests
349 adding manifests
350 adding file changes
350 adding file changes
351 added 8 changesets with 7 changes to 7 files (+2 heads)
351 added 8 changesets with 7 changes to 7 files (+2 heads)
352 new changesets cd010b8cd998:02de42196ebe
352 new changesets cd010b8cd998:02de42196ebe
353 updating to branch default
353 updating to branch default
354 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
354 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
355 $ rm -rf clone
355 $ rm -rf clone
356
356
357 $ cat > repo/.hg/bundle2maker << EOF
357 $ cat > repo/.hg/bundle2maker << EOF
358 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394'}
358 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394'}
359 > EOF
359 > EOF
360 $ hg clone ssh://user@dummy/repo clone
360 $ hg clone ssh://user@dummy/repo clone
361 requesting all changes
361 requesting all changes
362 remote: remote-changegroup
362 remote: remote-changegroup
363 adding changesets
363 adding changesets
364 adding manifests
364 adding manifests
365 adding file changes
365 adding file changes
366 added 8 changesets with 7 changes to 7 files (+2 heads)
366 added 8 changesets with 7 changes to 7 files (+2 heads)
367 new changesets cd010b8cd998:02de42196ebe
367 new changesets cd010b8cd998:02de42196ebe
368 updating to branch default
368 updating to branch default
369 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
369 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 $ rm -rf clone
370 $ rm -rf clone
371
371
372 Hash digest mismatch throws an error
372 Hash digest mismatch throws an error
373
373
374 $ cat > repo/.hg/bundle2maker << EOF
374 $ cat > repo/.hg/bundle2maker << EOF
375 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '0' * 40}
375 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '0' * 40}
376 > EOF
376 > EOF
377 $ hg clone ssh://user@dummy/repo clone
377 $ hg clone ssh://user@dummy/repo clone
378 requesting all changes
378 requesting all changes
379 remote: remote-changegroup
379 remote: remote-changegroup
380 adding changesets
380 adding changesets
381 adding manifests
381 adding manifests
382 adding file changes
382 adding file changes
383 transaction abort!
383 transaction abort!
384 rollback completed
384 rollback completed
385 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
385 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
386 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
386 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
387 [255]
387 [255]
388
388
389 Multiple hash digests can be given
389 Multiple hash digests can be given
390
390
391 $ cat > repo/.hg/bundle2maker << EOF
391 $ cat > repo/.hg/bundle2maker << EOF
392 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
392 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
393 > EOF
393 > EOF
394 $ hg clone ssh://user@dummy/repo clone
394 $ hg clone ssh://user@dummy/repo clone
395 requesting all changes
395 requesting all changes
396 remote: remote-changegroup
396 remote: remote-changegroup
397 adding changesets
397 adding changesets
398 adding manifests
398 adding manifests
399 adding file changes
399 adding file changes
400 added 8 changesets with 7 changes to 7 files (+2 heads)
400 added 8 changesets with 7 changes to 7 files (+2 heads)
401 new changesets cd010b8cd998:02de42196ebe
401 new changesets cd010b8cd998:02de42196ebe
402 updating to branch default
402 updating to branch default
403 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
403 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
404 $ rm -rf clone
404 $ rm -rf clone
405
405
406 If either of the multiple hash digests mismatches, an error is thrown
406 If either of the multiple hash digests mismatches, an error is thrown
407
407
408 $ cat > repo/.hg/bundle2maker << EOF
408 $ cat > repo/.hg/bundle2maker << EOF
409 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': '0' * 32, 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
409 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': '0' * 32, 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
410 > EOF
410 > EOF
411 $ hg clone ssh://user@dummy/repo clone
411 $ hg clone ssh://user@dummy/repo clone
412 requesting all changes
412 requesting all changes
413 remote: remote-changegroup
413 remote: remote-changegroup
414 adding changesets
414 adding changesets
415 adding manifests
415 adding manifests
416 adding file changes
416 adding file changes
417 transaction abort!
417 transaction abort!
418 rollback completed
418 rollback completed
419 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
419 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
420 md5 mismatch: expected 00000000000000000000000000000000, got e22172c2907ef88794b7bea6642c2394
420 md5 mismatch: expected 00000000000000000000000000000000, got e22172c2907ef88794b7bea6642c2394
421 [255]
421 [255]
422
422
423 $ cat > repo/.hg/bundle2maker << EOF
423 $ cat > repo/.hg/bundle2maker << EOF
424 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '0' * 40}
424 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '0' * 40}
425 > EOF
425 > EOF
426 $ hg clone ssh://user@dummy/repo clone
426 $ hg clone ssh://user@dummy/repo clone
427 requesting all changes
427 requesting all changes
428 remote: remote-changegroup
428 remote: remote-changegroup
429 adding changesets
429 adding changesets
430 adding manifests
430 adding manifests
431 adding file changes
431 adding file changes
432 transaction abort!
432 transaction abort!
433 rollback completed
433 rollback completed
434 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
434 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
435 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
435 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
436 [255]
436 [255]
437
437
438 Corruption tests
438 Corruption tests
439
439
440 $ hg clone orig clone -r 2
440 $ hg clone orig clone -r 2
441 adding changesets
441 adding changesets
442 adding manifests
442 adding manifests
443 adding file changes
443 adding file changes
444 added 3 changesets with 3 changes to 3 files
444 added 3 changesets with 3 changes to 3 files
445 new changesets cd010b8cd998:5fddd98957c8
445 new changesets cd010b8cd998:5fddd98957c8
446 updating to branch default
446 updating to branch default
447 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
447 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
448
448
449 $ cat > repo/.hg/bundle2maker << EOF
449 $ cat > repo/.hg/bundle2maker << EOF
450 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
450 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
451 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle5.hg', 'size': 578, 'digests': 'sha1', 'digest:sha1': '0' * 40}
451 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle5.hg', 'size': 578, 'digests': 'sha1', 'digest:sha1': '0' * 40}
452 > changegroup 0:6 7
452 > changegroup 0:6 7
453 > EOF
453 > EOF
454 $ hg pull -R clone ssh://user@dummy/repo
454 $ hg pull -R clone ssh://user@dummy/repo
455 pulling from ssh://user@dummy/repo
455 pulling from ssh://user@dummy/repo
456 searching for changes
456 searching for changes
457 remote: remote-changegroup
457 remote: remote-changegroup
458 adding changesets
458 adding changesets
459 adding manifests
459 adding manifests
460 adding file changes
460 adding file changes
461 remote: remote-changegroup
461 remote: remote-changegroup
462 adding changesets
462 adding changesets
463 adding manifests
463 adding manifests
464 adding file changes
464 adding file changes
465 transaction abort!
465 transaction abort!
466 rollback completed
466 rollback completed
467 abort: bundle at http://localhost:$HGPORT/bundle5.hg is corrupted:
467 abort: bundle at http://localhost:$HGPORT/bundle5.hg is corrupted:
468 sha1 mismatch: expected 0000000000000000000000000000000000000000, got f29485d6bfd37db99983cfc95ecb52f8ca396106
468 sha1 mismatch: expected 0000000000000000000000000000000000000000, got f29485d6bfd37db99983cfc95ecb52f8ca396106
469 [255]
469 [255]
470
470
471 The entire transaction has been rolled back in the pull above
471 The entire transaction has been rolled back in the pull above
472
472
473 $ hg -R clone log -G
473 $ hg -R clone log -G
474 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
474 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
475 |
475 |
476 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
476 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
477 |
477 |
478 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
478 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
479
479
480
480
481 No params
481 No params
482
482
483 $ cat > repo/.hg/bundle2maker << EOF
483 $ cat > repo/.hg/bundle2maker << EOF
484 > raw-remote-changegroup {}
484 > raw-remote-changegroup {}
485 > EOF
485 > EOF
486 $ hg pull -R clone ssh://user@dummy/repo
486 $ hg pull -R clone ssh://user@dummy/repo
487 pulling from ssh://user@dummy/repo
487 pulling from ssh://user@dummy/repo
488 searching for changes
488 searching for changes
489 remote: remote-changegroup
489 remote: remote-changegroup
490 abort: remote-changegroup: missing "url" param
490 abort: remote-changegroup: missing "url" param
491 [255]
491 [255]
492
492
493 Missing size
493 Missing size
494
494
495 $ cat > repo/.hg/bundle2maker << EOF
495 $ cat > repo/.hg/bundle2maker << EOF
496 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg'}
496 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg'}
497 > EOF
497 > EOF
498 $ hg pull -R clone ssh://user@dummy/repo
498 $ hg pull -R clone ssh://user@dummy/repo
499 pulling from ssh://user@dummy/repo
499 pulling from ssh://user@dummy/repo
500 searching for changes
500 searching for changes
501 remote: remote-changegroup
501 remote: remote-changegroup
502 abort: remote-changegroup: missing "size" param
502 abort: remote-changegroup: missing "size" param
503 [255]
503 [255]
504
504
505 Invalid size
505 Invalid size
506
506
507 $ cat > repo/.hg/bundle2maker << EOF
507 $ cat > repo/.hg/bundle2maker << EOF
508 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 'foo'}
508 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 'foo'}
509 > EOF
509 > EOF
510 $ hg pull -R clone ssh://user@dummy/repo
510 $ hg pull -R clone ssh://user@dummy/repo
511 pulling from ssh://user@dummy/repo
511 pulling from ssh://user@dummy/repo
512 searching for changes
512 searching for changes
513 remote: remote-changegroup
513 remote: remote-changegroup
514 abort: remote-changegroup: invalid value for param "size"
514 abort: remote-changegroup: invalid value for param "size"
515 [255]
515 [255]
516
516
517 Size mismatch
517 Size mismatch
518
518
519 $ cat > repo/.hg/bundle2maker << EOF
519 $ cat > repo/.hg/bundle2maker << EOF
520 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 42}
520 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 42}
521 > EOF
521 > EOF
522 $ hg pull -R clone ssh://user@dummy/repo
522 $ hg pull -R clone ssh://user@dummy/repo
523 pulling from ssh://user@dummy/repo
523 pulling from ssh://user@dummy/repo
524 searching for changes
524 searching for changes
525 remote: remote-changegroup
525 remote: remote-changegroup
526 adding changesets
526 adding changesets
527 adding manifests
527 adding manifests
528 adding file changes
528 adding file changes
529 transaction abort!
529 transaction abort!
530 rollback completed
530 rollback completed
531 abort: bundle at http://localhost:$HGPORT/bundle4.hg is corrupted:
531 abort: bundle at http://localhost:$HGPORT/bundle4.hg is corrupted:
532 size mismatch: expected 42, got 581
532 size mismatch: expected 42, got 581
533 [255]
533 [255]
534
534
535 Unknown digest
535 Unknown digest
536
536
537 $ cat > repo/.hg/bundle2maker << EOF
537 $ cat > repo/.hg/bundle2maker << EOF
538 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'foo', 'digest:foo': 'bar'}
538 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'foo', 'digest:foo': 'bar'}
539 > EOF
539 > EOF
540 $ hg pull -R clone ssh://user@dummy/repo
540 $ hg pull -R clone ssh://user@dummy/repo
541 pulling from ssh://user@dummy/repo
541 pulling from ssh://user@dummy/repo
542 searching for changes
542 searching for changes
543 remote: remote-changegroup
543 remote: remote-changegroup
544 abort: missing support for remote-changegroup - digest:foo
544 abort: missing support for remote-changegroup - digest:foo
545 [255]
545 [100]
546
546
547 Missing digest
547 Missing digest
548
548
549 $ cat > repo/.hg/bundle2maker << EOF
549 $ cat > repo/.hg/bundle2maker << EOF
550 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'sha1'}
550 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'sha1'}
551 > EOF
551 > EOF
552 $ hg pull -R clone ssh://user@dummy/repo
552 $ hg pull -R clone ssh://user@dummy/repo
553 pulling from ssh://user@dummy/repo
553 pulling from ssh://user@dummy/repo
554 searching for changes
554 searching for changes
555 remote: remote-changegroup
555 remote: remote-changegroup
556 abort: remote-changegroup: missing "digest:sha1" param
556 abort: remote-changegroup: missing "digest:sha1" param
557 [255]
557 [255]
558
558
559 Not an HTTP url
559 Not an HTTP url
560
560
561 $ cat > repo/.hg/bundle2maker << EOF
561 $ cat > repo/.hg/bundle2maker << EOF
562 > raw-remote-changegroup {'url': 'ssh://localhost:$HGPORT/bundle4.hg', 'size': 581}
562 > raw-remote-changegroup {'url': 'ssh://localhost:$HGPORT/bundle4.hg', 'size': 581}
563 > EOF
563 > EOF
564 $ hg pull -R clone ssh://user@dummy/repo
564 $ hg pull -R clone ssh://user@dummy/repo
565 pulling from ssh://user@dummy/repo
565 pulling from ssh://user@dummy/repo
566 searching for changes
566 searching for changes
567 remote: remote-changegroup
567 remote: remote-changegroup
568 abort: remote-changegroup does not support ssh urls
568 abort: remote-changegroup does not support ssh urls
569 [255]
569 [255]
570
570
571 Not a bundle
571 Not a bundle
572
572
573 $ cat > notbundle.hg << EOF
573 $ cat > notbundle.hg << EOF
574 > foo
574 > foo
575 > EOF
575 > EOF
576 $ cat > repo/.hg/bundle2maker << EOF
576 $ cat > repo/.hg/bundle2maker << EOF
577 > remote-changegroup http://localhost:$HGPORT/notbundle.hg notbundle.hg
577 > remote-changegroup http://localhost:$HGPORT/notbundle.hg notbundle.hg
578 > EOF
578 > EOF
579 $ hg pull -R clone ssh://user@dummy/repo
579 $ hg pull -R clone ssh://user@dummy/repo
580 pulling from ssh://user@dummy/repo
580 pulling from ssh://user@dummy/repo
581 searching for changes
581 searching for changes
582 remote: remote-changegroup
582 remote: remote-changegroup
583 abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle
583 abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle
584 [255]
584 [255]
585
585
586 Not a bundle 1.0
586 Not a bundle 1.0
587
587
588 $ cat > notbundle10.hg << EOF
588 $ cat > notbundle10.hg << EOF
589 > HG20
589 > HG20
590 > EOF
590 > EOF
591 $ cat > repo/.hg/bundle2maker << EOF
591 $ cat > repo/.hg/bundle2maker << EOF
592 > remote-changegroup http://localhost:$HGPORT/notbundle10.hg notbundle10.hg
592 > remote-changegroup http://localhost:$HGPORT/notbundle10.hg notbundle10.hg
593 > EOF
593 > EOF
594 $ hg pull -R clone ssh://user@dummy/repo
594 $ hg pull -R clone ssh://user@dummy/repo
595 pulling from ssh://user@dummy/repo
595 pulling from ssh://user@dummy/repo
596 searching for changes
596 searching for changes
597 remote: remote-changegroup
597 remote: remote-changegroup
598 abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0
598 abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0
599 [255]
599 [255]
600
600
601 $ hg -R clone log -G
601 $ hg -R clone log -G
602 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
602 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
603 |
603 |
604 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
604 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
605 |
605 |
606 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
606 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
607
607
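The aborts in the cases above are all parameter sanity checks that run before any remote data is fetched: the URL scheme must be http or https, every algorithm named in 'digests' must be supported, and each advertised algorithm needs a matching 'digest:<algo>' value. A minimal illustrative sketch of those checks, not Mercurial's code; the parameter names come from the bundle2maker lines above, and the supported set mirrors the digests listed under the bundle2 capabilities shown further down in this changeset:

  def check_remote_changegroup(params, supported=('md5', 'sha1', 'sha512')):
      url = params['url']
      if not url.startswith(('http://', 'https://')):
          # e.g. the ssh:// case above
          raise ValueError('remote-changegroup does not support %s urls'
                           % url.split(':', 1)[0])
      for algo in filter(None, params.get('digests', '').split(',')):
          if algo not in supported:
              # the "Unknown digest" case: digests=foo
              raise ValueError('missing support for remote-changegroup - digest:%s'
                               % algo)
          if 'digest:%s' % algo not in params:
              # the "Missing digest" case: digests=sha1 without digest:sha1
              raise ValueError('remote-changegroup: missing "digest:%s" param'
                               % algo)

  # a well-formed parameter set (the digest value here is made up) passes silently
  check_remote_changegroup({'url': 'https://example.org/bundle4.hg', 'size': 581,
                            'digests': 'sha1', 'digest:sha1': '0' * 40})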
608 $ rm -rf clone
608 $ rm -rf clone
609
609
610 $ killdaemons.py
610 $ killdaemons.py
@@ -1,636 +1,636 b''
1 #require serve no-reposimplestore no-chg
1 #require serve no-reposimplestore no-chg
2
2
3 #testcases stream-legacy stream-bundle2
3 #testcases stream-legacy stream-bundle2
4
4
5 #if stream-legacy
5 #if stream-legacy
6 $ cat << EOF >> $HGRCPATH
6 $ cat << EOF >> $HGRCPATH
7 > [server]
7 > [server]
8 > bundle2.stream = no
8 > bundle2.stream = no
9 > EOF
9 > EOF
10 #endif
10 #endif
11
11
12 Initialize repository
12 Initialize repository
13 the status call is to check for issue5130
13 the status call is to check for issue5130
14
14
15 $ hg init server
15 $ hg init server
16 $ cd server
16 $ cd server
17 $ touch foo
17 $ touch foo
18 $ hg -q commit -A -m initial
18 $ hg -q commit -A -m initial
19 >>> for i in range(1024):
19 >>> for i in range(1024):
20 ... with open(str(i), 'wb') as fh:
20 ... with open(str(i), 'wb') as fh:
21 ... fh.write(b"%d" % i) and None
21 ... fh.write(b"%d" % i) and None
22 $ hg -q commit -A -m 'add a lot of files'
22 $ hg -q commit -A -m 'add a lot of files'
23 $ hg st
23 $ hg st
24 $ hg --config server.uncompressed=false serve -p $HGPORT -d --pid-file=hg.pid
24 $ hg --config server.uncompressed=false serve -p $HGPORT -d --pid-file=hg.pid
25 $ cat hg.pid > $DAEMON_PIDS
25 $ cat hg.pid > $DAEMON_PIDS
26 $ cd ..
26 $ cd ..
27
27
28 Cannot stream clone when server.uncompressed is set to false
28 Cannot stream clone when server.uncompressed is set to false
29
29
30 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=stream_out'
30 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=stream_out'
31 200 Script output follows
31 200 Script output follows
32
32
33 1
33 1
34
34
35 #if stream-legacy
35 #if stream-legacy
36 $ hg debugcapabilities http://localhost:$HGPORT
36 $ hg debugcapabilities http://localhost:$HGPORT
37 Main capabilities:
37 Main capabilities:
38 batch
38 batch
39 branchmap
39 branchmap
40 $USUAL_BUNDLE2_CAPS_SERVER$
40 $USUAL_BUNDLE2_CAPS_SERVER$
41 changegroupsubset
41 changegroupsubset
42 compression=$BUNDLE2_COMPRESSIONS$
42 compression=$BUNDLE2_COMPRESSIONS$
43 getbundle
43 getbundle
44 httpheader=1024
44 httpheader=1024
45 httpmediatype=0.1rx,0.1tx,0.2tx
45 httpmediatype=0.1rx,0.1tx,0.2tx
46 known
46 known
47 lookup
47 lookup
48 pushkey
48 pushkey
49 unbundle=HG10GZ,HG10BZ,HG10UN
49 unbundle=HG10GZ,HG10BZ,HG10UN
50 unbundlehash
50 unbundlehash
51 Bundle2 capabilities:
51 Bundle2 capabilities:
52 HG20
52 HG20
53 bookmarks
53 bookmarks
54 changegroup
54 changegroup
55 01
55 01
56 02
56 02
57 checkheads
57 checkheads
58 related
58 related
59 digests
59 digests
60 md5
60 md5
61 sha1
61 sha1
62 sha512
62 sha512
63 error
63 error
64 abort
64 abort
65 unsupportedcontent
65 unsupportedcontent
66 pushraced
66 pushraced
67 pushkey
67 pushkey
68 hgtagsfnodes
68 hgtagsfnodes
69 listkeys
69 listkeys
70 phases
70 phases
71 heads
71 heads
72 pushkey
72 pushkey
73 remote-changegroup
73 remote-changegroup
74 http
74 http
75 https
75 https
76
76
77 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
77 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
78 warning: stream clone requested but server has them disabled
78 warning: stream clone requested but server has them disabled
79 requesting all changes
79 requesting all changes
80 adding changesets
80 adding changesets
81 adding manifests
81 adding manifests
82 adding file changes
82 adding file changes
83 added 2 changesets with 1025 changes to 1025 files
83 added 2 changesets with 1025 changes to 1025 files
84 new changesets 96ee1d7354c4:c17445101a72
84 new changesets 96ee1d7354c4:c17445101a72
85
85
86 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
86 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
87 200 Script output follows
87 200 Script output follows
88 content-type: application/mercurial-0.2
88 content-type: application/mercurial-0.2
89
89
90
90
91 $ f --size body --hexdump --bytes 100
91 $ f --size body --hexdump --bytes 100
92 body: size=232
92 body: size=232
93 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
93 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
94 0010: cf 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |..ERROR:ABORT...|
94 0010: cf 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |..ERROR:ABORT...|
95 0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74 |....<.rmessagest|
95 0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74 |....<.rmessagest|
96 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
96 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
97 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
97 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
98 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
98 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
99 0060: 69 73 20 66 |is f|
99 0060: 69 73 20 66 |is f|
100
100
101 #endif
101 #endif
102 #if stream-bundle2
102 #if stream-bundle2
103 $ hg debugcapabilities http://localhost:$HGPORT
103 $ hg debugcapabilities http://localhost:$HGPORT
104 Main capabilities:
104 Main capabilities:
105 batch
105 batch
106 branchmap
106 branchmap
107 $USUAL_BUNDLE2_CAPS_SERVER$
107 $USUAL_BUNDLE2_CAPS_SERVER$
108 changegroupsubset
108 changegroupsubset
109 compression=$BUNDLE2_COMPRESSIONS$
109 compression=$BUNDLE2_COMPRESSIONS$
110 getbundle
110 getbundle
111 httpheader=1024
111 httpheader=1024
112 httpmediatype=0.1rx,0.1tx,0.2tx
112 httpmediatype=0.1rx,0.1tx,0.2tx
113 known
113 known
114 lookup
114 lookup
115 pushkey
115 pushkey
116 unbundle=HG10GZ,HG10BZ,HG10UN
116 unbundle=HG10GZ,HG10BZ,HG10UN
117 unbundlehash
117 unbundlehash
118 Bundle2 capabilities:
118 Bundle2 capabilities:
119 HG20
119 HG20
120 bookmarks
120 bookmarks
121 changegroup
121 changegroup
122 01
122 01
123 02
123 02
124 checkheads
124 checkheads
125 related
125 related
126 digests
126 digests
127 md5
127 md5
128 sha1
128 sha1
129 sha512
129 sha512
130 error
130 error
131 abort
131 abort
132 unsupportedcontent
132 unsupportedcontent
133 pushraced
133 pushraced
134 pushkey
134 pushkey
135 hgtagsfnodes
135 hgtagsfnodes
136 listkeys
136 listkeys
137 phases
137 phases
138 heads
138 heads
139 pushkey
139 pushkey
140 remote-changegroup
140 remote-changegroup
141 http
141 http
142 https
142 https
143
143
144 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
144 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
145 warning: stream clone requested but server has them disabled
145 warning: stream clone requested but server has them disabled
146 requesting all changes
146 requesting all changes
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 2 changesets with 1025 changes to 1025 files
150 added 2 changesets with 1025 changes to 1025 files
151 new changesets 96ee1d7354c4:c17445101a72
151 new changesets 96ee1d7354c4:c17445101a72
152
152
153 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
153 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
154 200 Script output follows
154 200 Script output follows
155 content-type: application/mercurial-0.2
155 content-type: application/mercurial-0.2
156
156
157
157
158 $ f --size body --hexdump --bytes 100
158 $ f --size body --hexdump --bytes 100
159 body: size=232
159 body: size=232
160 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
160 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
161 0010: cf 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |..ERROR:ABORT...|
161 0010: cf 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |..ERROR:ABORT...|
162 0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74 |....<.rmessagest|
162 0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74 |....<.rmessagest|
163 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
163 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
164 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
164 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
165 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
165 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
166 0060: 69 73 20 66 |is f|
166 0060: 69 73 20 66 |is f|
167
167
168 #endif
168 #endif
169
169
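To make the dumps easier to read, the hex columns above can be converted back to characters; the result reproduces the ASCII column and shows the abort text carried by the error part, cut off at 'this f' only because the body is truncated at 100 bytes by --bytes 100. Purely illustrative:

  dump = '''
  0020: 00 01 01 07 3c 04 72 6d 65 73 73 61 67 65 73 74
  0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73
  0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64
  0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68
  0060: 69 73 20 66
  '''
  data = b''.join(bytes.fromhex(line.split(':', 1)[1])
                  for line in dump.strip().splitlines())
  print(''.join(chr(b) if 32 <= b < 127 else '.' for b in data))
  # ....<.rmessagestream data requested but server does not allow this f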
170 $ killdaemons.py
170 $ killdaemons.py
171 $ cd server
171 $ cd server
172 $ hg serve -p $HGPORT -d --pid-file=hg.pid
172 $ hg serve -p $HGPORT -d --pid-file=hg.pid
173 $ cat hg.pid > $DAEMON_PIDS
173 $ cat hg.pid > $DAEMON_PIDS
174 $ cd ..
174 $ cd ..
175
175
176 Basic clone
176 Basic clone
177
177
178 #if stream-legacy
178 #if stream-legacy
179 $ hg clone --stream -U http://localhost:$HGPORT clone1
179 $ hg clone --stream -U http://localhost:$HGPORT clone1
180 streaming all changes
180 streaming all changes
181 1027 files to transfer, 96.3 KB of data (no-zstd !)
181 1027 files to transfer, 96.3 KB of data (no-zstd !)
182 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
182 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
183 1027 files to transfer, 93.5 KB of data (zstd !)
183 1027 files to transfer, 93.5 KB of data (zstd !)
184 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
184 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
185 searching for changes
185 searching for changes
186 no changes found
186 no changes found
187 #endif
187 #endif
188 #if stream-bundle2
188 #if stream-bundle2
189 $ hg clone --stream -U http://localhost:$HGPORT clone1
189 $ hg clone --stream -U http://localhost:$HGPORT clone1
190 streaming all changes
190 streaming all changes
191 1030 files to transfer, 96.5 KB of data (no-zstd !)
191 1030 files to transfer, 96.5 KB of data (no-zstd !)
192 transferred 96.5 KB in * seconds (*/sec) (glob) (no-zstd !)
192 transferred 96.5 KB in * seconds (*/sec) (glob) (no-zstd !)
193 1030 files to transfer, 93.6 KB of data (zstd !)
193 1030 files to transfer, 93.6 KB of data (zstd !)
194 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
194 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
195
195
196 $ ls -1 clone1/.hg/cache
196 $ ls -1 clone1/.hg/cache
197 branch2-base
197 branch2-base
198 branch2-immutable
198 branch2-immutable
199 branch2-served
199 branch2-served
200 branch2-served.hidden
200 branch2-served.hidden
201 branch2-visible
201 branch2-visible
202 branch2-visible-hidden
202 branch2-visible-hidden
203 hgtagsfnodes1
203 hgtagsfnodes1
204 rbc-names-v1
204 rbc-names-v1
205 rbc-revs-v1
205 rbc-revs-v1
206 tags2
206 tags2
207 tags2-served
207 tags2-served
208 #endif
208 #endif
209
209
210 getbundle requests with stream=1 are uncompressed
210 getbundle requests with stream=1 are uncompressed
211
211
212 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
212 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
213 200 Script output follows
213 200 Script output follows
214 content-type: application/mercurial-0.2
214 content-type: application/mercurial-0.2
215
215
216
216
217 $ f --size --hex --bytes 256 body
217 $ f --size --hex --bytes 256 body
218 body: size=112262 (no-zstd !)
218 body: size=112262 (no-zstd !)
219 body: size=109410 (zstd no-rust !)
219 body: size=109410 (zstd no-rust !)
220 body: size=109431 (rust !)
220 body: size=109431 (rust !)
221 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
221 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
222 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (no-zstd !)
222 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (no-zstd !)
223 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| (no-zstd !)
223 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| (no-zstd !)
224 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| (no-zstd !)
224 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| (no-zstd !)
225 0010: 99 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (zstd no-rust !)
225 0010: 99 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (zstd no-rust !)
226 0010: ae 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (rust !)
226 0010: ae 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (rust !)
227 0020: 05 09 04 0c 5e 62 79 74 65 63 6f 75 6e 74 39 35 |....^bytecount95| (zstd no-rust !)
227 0020: 05 09 04 0c 5e 62 79 74 65 63 6f 75 6e 74 39 35 |....^bytecount95| (zstd no-rust !)
228 0020: 05 09 04 0c 73 62 79 74 65 63 6f 75 6e 74 39 35 |....sbytecount95| (rust !)
228 0020: 05 09 04 0c 73 62 79 74 65 63 6f 75 6e 74 39 35 |....sbytecount95| (rust !)
229 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030| (zstd !)
229 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030| (zstd !)
230 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote|
230 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote|
231 0050: 6e 63 6f 64 65 25 32 43 66 6e 63 61 63 68 65 25 |ncode%2Cfncache%|
231 0050: 6e 63 6f 64 65 25 32 43 66 6e 63 61 63 68 65 25 |ncode%2Cfncache%|
232 0060: 32 43 67 65 6e 65 72 61 6c 64 65 6c 74 61 25 32 |2Cgeneraldelta%2|
232 0060: 32 43 67 65 6e 65 72 61 6c 64 65 6c 74 61 25 32 |2Cgeneraldelta%2|
233 0070: 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 |Crevlogv1%2Cspar| (no-zstd !)
233 0070: 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 |Crevlogv1%2Cspar| (no-zstd !)
234 0080: 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 |serevlog%2Cstore| (no-zstd !)
234 0080: 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 |serevlog%2Cstore| (no-zstd !)
235 0090: 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 |....s.Bdata/0.i.| (no-zstd !)
235 0090: 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 |....s.Bdata/0.i.| (no-zstd !)
236 00a0: 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 |................| (no-zstd !)
236 00a0: 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 |................| (no-zstd !)
237 00b0: 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 |................| (no-zstd !)
237 00b0: 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 |................| (no-zstd !)
238 00c0: 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 |)c.I.#....Vg.g,i| (no-zstd !)
238 00c0: 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 |)c.I.#....Vg.g,i| (no-zstd !)
239 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| (no-zstd !)
239 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| (no-zstd !)
240 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| (no-zstd !)
240 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| (no-zstd !)
241 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| (no-zstd !)
241 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| (no-zstd !)
242 0070: 43 72 65 76 6c 6f 67 2d 63 6f 6d 70 72 65 73 73 |Crevlog-compress| (zstd no-rust !)
242 0070: 43 72 65 76 6c 6f 67 2d 63 6f 6d 70 72 65 73 73 |Crevlog-compress| (zstd no-rust !)
243 0070: 43 70 65 72 73 69 73 74 65 6e 74 2d 6e 6f 64 65 |Cpersistent-node| (rust !)
243 0070: 43 70 65 72 73 69 73 74 65 6e 74 2d 6e 6f 64 65 |Cpersistent-node| (rust !)
244 0080: 69 6f 6e 2d 7a 73 74 64 25 32 43 72 65 76 6c 6f |ion-zstd%2Crevlo| (zstd no-rust !)
244 0080: 69 6f 6e 2d 7a 73 74 64 25 32 43 72 65 76 6c 6f |ion-zstd%2Crevlo| (zstd no-rust !)
245 0080: 6d 61 70 25 32 43 72 65 76 6c 6f 67 2d 63 6f 6d |map%2Crevlog-com| (rust !)
245 0080: 6d 61 70 25 32 43 72 65 76 6c 6f 67 2d 63 6f 6d |map%2Crevlog-com| (rust !)
246 0090: 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 6c |gv1%2Csparserevl| (zstd no-rust !)
246 0090: 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 6c |gv1%2Csparserevl| (zstd no-rust !)
247 0090: 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 25 32 43 |pression-zstd%2C| (rust !)
247 0090: 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 25 32 43 |pression-zstd%2C| (rust !)
248 00a0: 6f 67 25 32 43 73 74 6f 72 65 00 00 80 00 73 08 |og%2Cstore....s.| (zstd no-rust !)
248 00a0: 6f 67 25 32 43 73 74 6f 72 65 00 00 80 00 73 08 |og%2Cstore....s.| (zstd no-rust !)
249 00a0: 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 73 |revlogv1%2Cspars| (rust !)
249 00a0: 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 73 |revlogv1%2Cspars| (rust !)
250 00b0: 42 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 |Bdata/0.i.......| (zstd no-rust !)
250 00b0: 42 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 |Bdata/0.i.......| (zstd no-rust !)
251 00b0: 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 00 |erevlog%2Cstore.| (rust !)
251 00b0: 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 00 |erevlog%2Cstore.| (rust !)
252 00c0: 00 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 |................| (zstd no-rust !)
252 00c0: 00 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 |................| (zstd no-rust !)
253 00c0: 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 |...s.Bdata/0.i..| (rust !)
253 00c0: 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 |...s.Bdata/0.i..| (rust !)
254 00d0: 01 ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 |..........)c.I.#| (zstd no-rust !)
254 00d0: 01 ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 |..........)c.I.#| (zstd no-rust !)
255 00d0: 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 00 |................| (rust !)
255 00d0: 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 00 |................| (rust !)
256 00e0: 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 |....Vg.g,i..9...| (zstd no-rust !)
256 00e0: 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 |....Vg.g,i..9...| (zstd no-rust !)
257 00e0: 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 |...............)| (rust !)
257 00e0: 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 |...............)| (rust !)
258 00f0: 00 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 |.........u0s.Bda| (zstd no-rust !)
258 00f0: 00 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 |.........u0s.Bda| (zstd no-rust !)
259 00f0: 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 |c.I.#....Vg.g,i.| (rust !)
259 00f0: 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 |c.I.#....Vg.g,i.| (rust !)
260
260
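Two details of the captured body can be checked by hand from the dump above. The leading byte reads as a length prefix for the compression engine name (the '.noneHG20' column), confirming the reply really is uncompressed, and URL-decoding the 'requirements' value from the no-zstd variant lists the store requirements the stream advertises. A small sketch using only bytes visible in the dump:

  from urllib.parse import unquote

  prefix = bytes.fromhex('046e6f6e6548473230')    # first nine bytes of "body"
  namelen = prefix[0]
  print(prefix[1:1 + namelen].decode('ascii'))    # none  -> no compression
  print(prefix[1 + namelen:].decode('ascii'))     # HG20  -> bundle2 magic

  reqs = 'dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore'
  print(unquote(reqs))   # dotencode,fncache,generaldelta,revlogv1,sparserevlog,store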
261 --uncompressed is an alias to --stream
261 --uncompressed is an alias to --stream
262
262
263 #if stream-legacy
263 #if stream-legacy
264 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
264 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
265 streaming all changes
265 streaming all changes
266 1027 files to transfer, 96.3 KB of data (no-zstd !)
266 1027 files to transfer, 96.3 KB of data (no-zstd !)
267 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
267 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
268 1027 files to transfer, 93.5 KB of data (zstd !)
268 1027 files to transfer, 93.5 KB of data (zstd !)
269 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
269 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
270 searching for changes
270 searching for changes
271 no changes found
271 no changes found
272 #endif
272 #endif
273 #if stream-bundle2
273 #if stream-bundle2
274 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
274 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
275 streaming all changes
275 streaming all changes
276 1030 files to transfer, 96.5 KB of data (no-zstd !)
276 1030 files to transfer, 96.5 KB of data (no-zstd !)
277 transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !)
277 transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !)
278 1030 files to transfer, 93.6 KB of data (zstd !)
278 1030 files to transfer, 93.6 KB of data (zstd !)
279 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
279 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
280 #endif
280 #endif
281
281
282 Clone with background file closing enabled
282 Clone with background file closing enabled
283
283
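A note on the 'starting 4 threads for background file closing' lines in the debug output below: the idea behind worker.backgroundclose is that file handles are handed to a small pool of worker threads to be closed, so the receiving thread does not block on close(). The following is only a rough stand-in for that idea, not Mercurial's implementation:

  import concurrent.futures

  class backgroundcloser(object):
      def __init__(self, threads=4):
          self._pool = concurrent.futures.ThreadPoolExecutor(max_workers=threads)
      def close(self, fh):
          # queue the close and return immediately
          self._pool.submit(fh.close)
      def shutdown(self):
          self._pool.shutdown(wait=True)

  closer = backgroundcloser()
  fh = open(__file__, 'rb')
  fh.read()
  closer.close(fh)   # a worker thread performs the actual close
  closer.shutdown()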
284 #if stream-legacy
284 #if stream-legacy
285 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
285 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
286 using http://localhost:$HGPORT/
286 using http://localhost:$HGPORT/
287 sending capabilities command
287 sending capabilities command
288 sending branchmap command
288 sending branchmap command
289 streaming all changes
289 streaming all changes
290 sending stream_out command
290 sending stream_out command
291 1027 files to transfer, 96.3 KB of data (no-zstd !)
291 1027 files to transfer, 96.3 KB of data (no-zstd !)
292 1027 files to transfer, 93.5 KB of data (zstd !)
292 1027 files to transfer, 93.5 KB of data (zstd !)
293 starting 4 threads for background file closing
293 starting 4 threads for background file closing
294 updating the branch cache
294 updating the branch cache
295 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
295 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
296 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
296 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
297 query 1; heads
297 query 1; heads
298 sending batch command
298 sending batch command
299 searching for changes
299 searching for changes
300 all remote heads known locally
300 all remote heads known locally
301 no changes found
301 no changes found
302 sending getbundle command
302 sending getbundle command
303 bundle2-input-bundle: with-transaction
303 bundle2-input-bundle: with-transaction
304 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
304 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
305 bundle2-input-part: "phase-heads" supported
305 bundle2-input-part: "phase-heads" supported
306 bundle2-input-part: total payload size 24
306 bundle2-input-part: total payload size 24
307 bundle2-input-bundle: 2 parts total
307 bundle2-input-bundle: 2 parts total
308 checking for updated bookmarks
308 checking for updated bookmarks
309 updating the branch cache
309 updating the branch cache
310 (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
310 (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
311 #endif
311 #endif
312 #if stream-bundle2
312 #if stream-bundle2
313 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
313 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
314 using http://localhost:$HGPORT/
314 using http://localhost:$HGPORT/
315 sending capabilities command
315 sending capabilities command
316 query 1; heads
316 query 1; heads
317 sending batch command
317 sending batch command
318 streaming all changes
318 streaming all changes
319 sending getbundle command
319 sending getbundle command
320 bundle2-input-bundle: with-transaction
320 bundle2-input-bundle: with-transaction
321 bundle2-input-part: "stream2" (params: 3 mandatory) supported
321 bundle2-input-part: "stream2" (params: 3 mandatory) supported
322 applying stream bundle
322 applying stream bundle
323 1030 files to transfer, 96.5 KB of data (no-zstd !)
323 1030 files to transfer, 96.5 KB of data (no-zstd !)
324 1030 files to transfer, 93.6 KB of data (zstd !)
324 1030 files to transfer, 93.6 KB of data (zstd !)
325 starting 4 threads for background file closing
325 starting 4 threads for background file closing
326 starting 4 threads for background file closing
326 starting 4 threads for background file closing
327 updating the branch cache
327 updating the branch cache
328 transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !)
328 transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !)
329 bundle2-input-part: total payload size 112094 (no-zstd !)
329 bundle2-input-part: total payload size 112094 (no-zstd !)
330 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
330 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
331 bundle2-input-part: total payload size 109216 (zstd !)
331 bundle2-input-part: total payload size 109216 (zstd !)
332 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
332 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
333 bundle2-input-bundle: 2 parts total
333 bundle2-input-bundle: 2 parts total
334 checking for updated bookmarks
334 checking for updated bookmarks
335 updating the branch cache
335 updating the branch cache
336 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
336 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
337 #endif
337 #endif
338
338
339 Cannot stream clone when there are secret changesets
339 Cannot stream clone when there are secret changesets
340
340
341 $ hg -R server phase --force --secret -r tip
341 $ hg -R server phase --force --secret -r tip
342 $ hg clone --stream -U http://localhost:$HGPORT secret-denied
342 $ hg clone --stream -U http://localhost:$HGPORT secret-denied
343 warning: stream clone requested but server has them disabled
343 warning: stream clone requested but server has them disabled
344 requesting all changes
344 requesting all changes
345 adding changesets
345 adding changesets
346 adding manifests
346 adding manifests
347 adding file changes
347 adding file changes
348 added 1 changesets with 1 changes to 1 files
348 added 1 changesets with 1 changes to 1 files
349 new changesets 96ee1d7354c4
349 new changesets 96ee1d7354c4
350
350
351 $ killdaemons.py
351 $ killdaemons.py
352
352
353 Streaming of secrets can be overridden by server config
353 Streaming of secrets can be overridden by server config
354
354
355 $ cd server
355 $ cd server
356 $ hg serve --config server.uncompressedallowsecret=true -p $HGPORT -d --pid-file=hg.pid
356 $ hg serve --config server.uncompressedallowsecret=true -p $HGPORT -d --pid-file=hg.pid
357 $ cat hg.pid > $DAEMON_PIDS
357 $ cat hg.pid > $DAEMON_PIDS
358 $ cd ..
358 $ cd ..
359
359
360 #if stream-legacy
360 #if stream-legacy
361 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
361 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
362 streaming all changes
362 streaming all changes
363 1027 files to transfer, 96.3 KB of data (no-zstd !)
363 1027 files to transfer, 96.3 KB of data (no-zstd !)
364 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
364 transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !)
365 1027 files to transfer, 93.5 KB of data (zstd !)
365 1027 files to transfer, 93.5 KB of data (zstd !)
366 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
366 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
367 searching for changes
367 searching for changes
368 no changes found
368 no changes found
369 #endif
369 #endif
370 #if stream-bundle2
370 #if stream-bundle2
371 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
371 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
372 streaming all changes
372 streaming all changes
373 1030 files to transfer, 96.5 KB of data (no-zstd !)
373 1030 files to transfer, 96.5 KB of data (no-zstd !)
374 transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !)
374 transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !)
375 1030 files to transfer, 93.6 KB of data (zstd !)
375 1030 files to transfer, 93.6 KB of data (zstd !)
376 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
376 transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !)
377 #endif
377 #endif
378
378
379 $ killdaemons.py
379 $ killdaemons.py
380
380
381 Verify interaction between preferuncompressed and secret presence
381 Verify interaction between preferuncompressed and secret presence
382
382
383 $ cd server
383 $ cd server
384 $ hg serve --config server.preferuncompressed=true -p $HGPORT -d --pid-file=hg.pid
384 $ hg serve --config server.preferuncompressed=true -p $HGPORT -d --pid-file=hg.pid
385 $ cat hg.pid > $DAEMON_PIDS
385 $ cat hg.pid > $DAEMON_PIDS
386 $ cd ..
386 $ cd ..
387
387
388 $ hg clone -U http://localhost:$HGPORT preferuncompressed-secret
388 $ hg clone -U http://localhost:$HGPORT preferuncompressed-secret
389 requesting all changes
389 requesting all changes
390 adding changesets
390 adding changesets
391 adding manifests
391 adding manifests
392 adding file changes
392 adding file changes
393 added 1 changesets with 1 changes to 1 files
393 added 1 changesets with 1 changes to 1 files
394 new changesets 96ee1d7354c4
394 new changesets 96ee1d7354c4
395
395
396 $ killdaemons.py
396 $ killdaemons.py
397
397
398 Clone not allowed when full bundles are disabled and secrets can't be served
398 Clone not allowed when full bundles are disabled and secrets can't be served
399
399
400 $ cd server
400 $ cd server
401 $ hg serve --config server.disablefullbundle=true -p $HGPORT -d --pid-file=hg.pid
401 $ hg serve --config server.disablefullbundle=true -p $HGPORT -d --pid-file=hg.pid
402 $ cat hg.pid > $DAEMON_PIDS
402 $ cat hg.pid > $DAEMON_PIDS
403 $ cd ..
403 $ cd ..
404
404
405 $ hg clone --stream http://localhost:$HGPORT secret-full-disabled
405 $ hg clone --stream http://localhost:$HGPORT secret-full-disabled
406 warning: stream clone requested but server has them disabled
406 warning: stream clone requested but server has them disabled
407 requesting all changes
407 requesting all changes
408 remote: abort: server has pull-based clones disabled
408 remote: abort: server has pull-based clones disabled
409 abort: pull failed on remote
409 abort: pull failed on remote
410 (remove --pull if specified or upgrade Mercurial)
410 (remove --pull if specified or upgrade Mercurial)
411 [255]
411 [100]
412
412
413 Local stream clone with secrets involved
413 Local stream clone with secrets involved
414 (This is just a test of behavior: if you have access to the repo's files,
414 (This is just a test of behavior: if you have access to the repo's files,
415 there is no security boundary, so it isn't important to prevent a clone here.)
415 there is no security boundary, so it isn't important to prevent a clone here.)
416
416
417 $ hg clone -U --stream server local-secret
417 $ hg clone -U --stream server local-secret
418 warning: stream clone requested but server has them disabled
418 warning: stream clone requested but server has them disabled
419 requesting all changes
419 requesting all changes
420 adding changesets
420 adding changesets
421 adding manifests
421 adding manifests
422 adding file changes
422 adding file changes
423 added 1 changesets with 1 changes to 1 files
423 added 1 changesets with 1 changes to 1 files
424 new changesets 96ee1d7354c4
424 new changesets 96ee1d7354c4
425
425
426 Stream clone while repo is changing:
426 Stream clone while repo is changing:
427
427
428 $ mkdir changing
428 $ mkdir changing
429 $ cd changing
429 $ cd changing
430
430
431 extension for delaying the server process so we can reliably modify the repo
431 extension for delaying the server process so we can reliably modify the repo
432 while cloning
432 while cloning
433
433
434 $ cat > delayer.py <<EOF
434 $ cat > delayer.py <<EOF
435 > import time
435 > import time
436 > from mercurial import extensions, vfs
436 > from mercurial import extensions, vfs
437 > def __call__(orig, self, path, *args, **kwargs):
437 > def __call__(orig, self, path, *args, **kwargs):
438 > if path == 'data/f1.i':
438 > if path == 'data/f1.i':
439 > time.sleep(2)
439 > time.sleep(2)
440 > return orig(self, path, *args, **kwargs)
440 > return orig(self, path, *args, **kwargs)
441 > extensions.wrapfunction(vfs.vfs, '__call__', __call__)
441 > extensions.wrapfunction(vfs.vfs, '__call__', __call__)
442 > EOF
442 > EOF
443
443
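The wrapper above follows the extensions.wrapfunction calling convention: the replacement receives the original callable as its first argument and is expected to delegate to it after doing its extra work (here, sleeping when data/f1.i is opened). A stripped-down stand-in for that pattern, with no Mercurial imports (the class and path are placeholders):

  import time

  class fakevfs(object):
      def __call__(self, path):
          return 'opened %s' % path

  def delaying_call(orig, self, path, *args, **kwargs):
      if path == 'data/f1.i':
          time.sleep(0)      # the real test sleeps 2 seconds here
      return orig(self, path, *args, **kwargs)

  _orig = fakevfs.__call__
  fakevfs.__call__ = lambda self, *a, **kw: delaying_call(_orig, self, *a, **kw)

  print(fakevfs()('data/f1.i'))   # -> opened data/f1.i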
444 prepare a repo with a small and a big file to cover both code paths in emitrevlogdata
444 prepare a repo with a small and a big file to cover both code paths in emitrevlogdata
445
445
446 $ hg init repo
446 $ hg init repo
447 $ touch repo/f1
447 $ touch repo/f1
448 $ $TESTDIR/seq.py 50000 > repo/f2
448 $ $TESTDIR/seq.py 50000 > repo/f2
449 $ hg -R repo ci -Aqm "0"
449 $ hg -R repo ci -Aqm "0"
450 $ hg serve -R repo -p $HGPORT1 -d --pid-file=hg.pid --config extensions.delayer=delayer.py
450 $ hg serve -R repo -p $HGPORT1 -d --pid-file=hg.pid --config extensions.delayer=delayer.py
451 $ cat hg.pid >> $DAEMON_PIDS
451 $ cat hg.pid >> $DAEMON_PIDS
452
452
453 clone while modifying the repo between stat'ing a file with the write lock and
453 clone while modifying the repo between stat'ing a file with the write lock and
454 actually serving the file content
454 actually serving the file content
455
455
456 $ hg clone -q --stream -U http://localhost:$HGPORT1 clone &
456 $ hg clone -q --stream -U http://localhost:$HGPORT1 clone &
457 $ sleep 1
457 $ sleep 1
458 $ echo >> repo/f1
458 $ echo >> repo/f1
459 $ echo >> repo/f2
459 $ echo >> repo/f2
460 $ hg -R repo ci -m "1" --config ui.timeout.warn=-1
460 $ hg -R repo ci -m "1" --config ui.timeout.warn=-1
461 $ wait
461 $ wait
462 $ hg -R clone id
462 $ hg -R clone id
463 000000000000
463 000000000000
464 $ cd ..
464 $ cd ..
465
465
466 Stream repository with bookmarks
466 Stream repository with bookmarks
467 --------------------------------
467 --------------------------------
468
468
469 (revert introduction of secret changeset)
469 (revert introduction of secret changeset)
470
470
471 $ hg -R server phase --draft 'secret()'
471 $ hg -R server phase --draft 'secret()'
472
472
473 add a bookmark
473 add a bookmark
474
474
475 $ hg -R server bookmark -r tip some-bookmark
475 $ hg -R server bookmark -r tip some-bookmark
476
476
477 clone it
477 clone it
478
478
479 #if stream-legacy
479 #if stream-legacy
480 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
480 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
481 streaming all changes
481 streaming all changes
482 1027 files to transfer, 96.3 KB of data (no-zstd !)
482 1027 files to transfer, 96.3 KB of data (no-zstd !)
483 transferred 96.3 KB in * seconds (*) (glob) (no-zstd !)
483 transferred 96.3 KB in * seconds (*) (glob) (no-zstd !)
484 1027 files to transfer, 93.5 KB of data (zstd !)
484 1027 files to transfer, 93.5 KB of data (zstd !)
485 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
485 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
486 searching for changes
486 searching for changes
487 no changes found
487 no changes found
488 updating to branch default
488 updating to branch default
489 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
489 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
490 #endif
490 #endif
491 #if stream-bundle2
491 #if stream-bundle2
492 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
492 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
493 streaming all changes
493 streaming all changes
494 1033 files to transfer, 96.6 KB of data (no-zstd !)
494 1033 files to transfer, 96.6 KB of data (no-zstd !)
495 transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !)
495 transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !)
496 1033 files to transfer, 93.8 KB of data (zstd !)
496 1033 files to transfer, 93.8 KB of data (zstd !)
497 transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !)
497 transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !)
498 updating to branch default
498 updating to branch default
499 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
499 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
500 #endif
500 #endif
501 $ hg -R with-bookmarks bookmarks
501 $ hg -R with-bookmarks bookmarks
502 some-bookmark 1:c17445101a72
502 some-bookmark 1:c17445101a72
503
503
504 Stream repository with phases
504 Stream repository with phases
505 -----------------------------
505 -----------------------------
506
506
507 Clone as publishing
507 Clone as publishing
508
508
509 $ hg -R server phase -r 'all()'
509 $ hg -R server phase -r 'all()'
510 0: draft
510 0: draft
511 1: draft
511 1: draft
512
512
513 #if stream-legacy
513 #if stream-legacy
514 $ hg clone --stream http://localhost:$HGPORT phase-publish
514 $ hg clone --stream http://localhost:$HGPORT phase-publish
515 streaming all changes
515 streaming all changes
516 1027 files to transfer, 96.3 KB of data (no-zstd !)
516 1027 files to transfer, 96.3 KB of data (no-zstd !)
517 transferred 96.3 KB in * seconds (*) (glob) (no-zstd !)
517 transferred 96.3 KB in * seconds (*) (glob) (no-zstd !)
518 1027 files to transfer, 93.5 KB of data (zstd !)
518 1027 files to transfer, 93.5 KB of data (zstd !)
519 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
519 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
520 searching for changes
520 searching for changes
521 no changes found
521 no changes found
522 updating to branch default
522 updating to branch default
523 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
523 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
524 #endif
524 #endif
525 #if stream-bundle2
525 #if stream-bundle2
526 $ hg clone --stream http://localhost:$HGPORT phase-publish
526 $ hg clone --stream http://localhost:$HGPORT phase-publish
527 streaming all changes
527 streaming all changes
528 1033 files to transfer, 96.6 KB of data (no-zstd !)
528 1033 files to transfer, 96.6 KB of data (no-zstd !)
529 transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !)
529 transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !)
530 1033 files to transfer, 93.8 KB of data (zstd !)
530 1033 files to transfer, 93.8 KB of data (zstd !)
531 transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !)
531 transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !)
532 updating to branch default
532 updating to branch default
533 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
533 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
534 #endif
534 #endif
535 $ hg -R phase-publish phase -r 'all()'
535 $ hg -R phase-publish phase -r 'all()'
536 0: public
536 0: public
537 1: public
537 1: public
538
538
539 Clone as non-publishing
539 Clone as non-publishing
540
540
541 $ cat << EOF >> server/.hg/hgrc
541 $ cat << EOF >> server/.hg/hgrc
542 > [phases]
542 > [phases]
543 > publish = False
543 > publish = False
544 > EOF
544 > EOF
545 $ killdaemons.py
545 $ killdaemons.py
546 $ hg -R server serve -p $HGPORT -d --pid-file=hg.pid
546 $ hg -R server serve -p $HGPORT -d --pid-file=hg.pid
547 $ cat hg.pid > $DAEMON_PIDS
547 $ cat hg.pid > $DAEMON_PIDS
548
548
549 #if stream-legacy
549 #if stream-legacy
550
550
551 With v1 of the stream protocol, changesets are always cloned as public. This makes
551 With v1 of the stream protocol, changesets are always cloned as public. This makes
552 stream v1 unsuitable for non-publishing repositories.
552 stream v1 unsuitable for non-publishing repositories.
553
553
554 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
554 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
555 streaming all changes
555 streaming all changes
556 1027 files to transfer, 96.3 KB of data (no-zstd !)
556 1027 files to transfer, 96.3 KB of data (no-zstd !)
557 transferred 96.3 KB in * seconds (* */sec) (glob) (no-zstd !)
557 transferred 96.3 KB in * seconds (* */sec) (glob) (no-zstd !)
558 1027 files to transfer, 93.5 KB of data (zstd !)
558 1027 files to transfer, 93.5 KB of data (zstd !)
559 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
559 transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !)
560 searching for changes
560 searching for changes
561 no changes found
561 no changes found
562 updating to branch default
562 updating to branch default
563 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
563 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
564 $ hg -R phase-no-publish phase -r 'all()'
564 $ hg -R phase-no-publish phase -r 'all()'
565 0: public
565 0: public
566 1: public
566 1: public
567 #endif
567 #endif
568 #if stream-bundle2
568 #if stream-bundle2
569 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
569 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
570 streaming all changes
570 streaming all changes
571 1034 files to transfer, 96.7 KB of data (no-zstd !)
571 1034 files to transfer, 96.7 KB of data (no-zstd !)
572 transferred 96.7 KB in * seconds (* */sec) (glob) (no-zstd !)
572 transferred 96.7 KB in * seconds (* */sec) (glob) (no-zstd !)
573 1034 files to transfer, 93.9 KB of data (zstd !)
573 1034 files to transfer, 93.9 KB of data (zstd !)
574 transferred 93.9 KB in * seconds (* */sec) (glob) (zstd !)
574 transferred 93.9 KB in * seconds (* */sec) (glob) (zstd !)
575 updating to branch default
575 updating to branch default
576 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
576 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved
577 $ hg -R phase-no-publish phase -r 'all()'
577 $ hg -R phase-no-publish phase -r 'all()'
578 0: draft
578 0: draft
579 1: draft
579 1: draft
580 #endif
580 #endif
581
581
582 $ killdaemons.py
582 $ killdaemons.py
583
583
584 #if stream-legacy
584 #if stream-legacy
585
585
586 With v1 of the stream protocol, changesets are always cloned as public. There is
586 With v1 of the stream protocol, changesets are always cloned as public. There is
587 no exchange of obsolescence markers in stream v1.
587 no exchange of obsolescence markers in stream v1.
588
588
589 #endif
589 #endif
590 #if stream-bundle2
590 #if stream-bundle2
591
591
592 Stream repository with obsolescence
592 Stream repository with obsolescence
593 -----------------------------------
593 -----------------------------------
594
594
595 Clone non-publishing with obsolescence
595 Clone non-publishing with obsolescence
596
596
597 $ cat >> $HGRCPATH << EOF
597 $ cat >> $HGRCPATH << EOF
598 > [experimental]
598 > [experimental]
599 > evolution=all
599 > evolution=all
600 > EOF
600 > EOF
601
601
602 $ cd server
602 $ cd server
603 $ echo foo > foo
603 $ echo foo > foo
604 $ hg -q commit -m 'about to be pruned'
604 $ hg -q commit -m 'about to be pruned'
605 $ hg debugobsolete `hg log -r . -T '{node}'` -d '0 0' -u test --record-parents
605 $ hg debugobsolete `hg log -r . -T '{node}'` -d '0 0' -u test --record-parents
606 1 new obsolescence markers
606 1 new obsolescence markers
607 obsoleted 1 changesets
607 obsoleted 1 changesets
608 $ hg up null -q
608 $ hg up null -q
609 $ hg log -T '{rev}: {phase}\n'
609 $ hg log -T '{rev}: {phase}\n'
610 1: draft
610 1: draft
611 0: draft
611 0: draft
612 $ hg serve -p $HGPORT -d --pid-file=hg.pid
612 $ hg serve -p $HGPORT -d --pid-file=hg.pid
613 $ cat hg.pid > $DAEMON_PIDS
613 $ cat hg.pid > $DAEMON_PIDS
614 $ cd ..
614 $ cd ..
615
615
616 $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence
616 $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence
617 streaming all changes
617 streaming all changes
618 1035 files to transfer, 97.1 KB of data (no-zstd !)
618 1035 files to transfer, 97.1 KB of data (no-zstd !)
619 transferred 97.1 KB in * seconds (* */sec) (glob) (no-zstd !)
619 transferred 97.1 KB in * seconds (* */sec) (glob) (no-zstd !)
620 1035 files to transfer, 94.3 KB of data (zstd !)
620 1035 files to transfer, 94.3 KB of data (zstd !)
621 transferred 94.3 KB in * seconds (* */sec) (glob) (zstd !)
621 transferred 94.3 KB in * seconds (* */sec) (glob) (zstd !)
622 $ hg -R with-obsolescence log -T '{rev}: {phase}\n'
622 $ hg -R with-obsolescence log -T '{rev}: {phase}\n'
623 1: draft
623 1: draft
624 0: draft
624 0: draft
625 $ hg debugobsolete -R with-obsolescence
625 $ hg debugobsolete -R with-obsolescence
626 50382b884f66690b7045cac93a540cba4d4c906f 0 {c17445101a72edac06facd130d14808dfbd5c7c2} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
626 50382b884f66690b7045cac93a540cba4d4c906f 0 {c17445101a72edac06facd130d14808dfbd5c7c2} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
627
627
628 $ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
628 $ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
629 streaming all changes
629 streaming all changes
630 remote: abort: server has obsolescence markers, but client cannot receive them via stream clone
630 remote: abort: server has obsolescence markers, but client cannot receive them via stream clone
631 abort: pull failed on remote
631 abort: pull failed on remote
632 [255]
632 [100]
633
633
634 $ killdaemons.py
634 $ killdaemons.py
635
635
636 #endif
636 #endif
@@ -1,617 +1,617 b''
1 #require serve
1 #require serve
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo foo>foo
5 $ echo foo>foo
6 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
6 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
7 $ echo foo>foo.d/foo
7 $ echo foo>foo.d/foo
8 $ echo bar>foo.d/bAr.hg.d/BaR
8 $ echo bar>foo.d/bAr.hg.d/BaR
9 $ echo bar>foo.d/baR.d.hg/bAR
9 $ echo bar>foo.d/baR.d.hg/bAR
10 $ hg commit -A -m 1
10 $ hg commit -A -m 1
11 adding foo
11 adding foo
12 adding foo.d/bAr.hg.d/BaR
12 adding foo.d/bAr.hg.d/BaR
13 adding foo.d/baR.d.hg/bAR
13 adding foo.d/baR.d.hg/bAR
14 adding foo.d/foo
14 adding foo.d/foo
15 $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log
15 $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log
16 $ hg serve --config server.uncompressed=False -p $HGPORT1 -d --pid-file=../hg2.pid
16 $ hg serve --config server.uncompressed=False -p $HGPORT1 -d --pid-file=../hg2.pid
17
17
18 Test server address cannot be reused
18 Test server address cannot be reused
19
19
20 $ hg serve -p $HGPORT1 2>&1
20 $ hg serve -p $HGPORT1 2>&1
21 abort: cannot start server at 'localhost:$HGPORT1': $EADDRINUSE$
21 abort: cannot start server at 'localhost:$HGPORT1': $EADDRINUSE$
22 [255]
22 [255]
23
23
24 $ cd ..
24 $ cd ..
25 $ cat hg1.pid hg2.pid >> $DAEMON_PIDS
25 $ cat hg1.pid hg2.pid >> $DAEMON_PIDS
26
26
27 clone via stream
27 clone via stream
28
28
29 #if no-reposimplestore
29 #if no-reposimplestore
30 $ hg clone --stream http://localhost:$HGPORT/ copy 2>&1
30 $ hg clone --stream http://localhost:$HGPORT/ copy 2>&1
31 streaming all changes
31 streaming all changes
32 9 files to transfer, 715 bytes of data (no-zstd !)
32 9 files to transfer, 715 bytes of data (no-zstd !)
33 9 files to transfer, 717 bytes of data (zstd !)
33 9 files to transfer, 717 bytes of data (zstd !)
34 transferred * bytes in * seconds (*/sec) (glob)
34 transferred * bytes in * seconds (*/sec) (glob)
35 updating to branch default
35 updating to branch default
36 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
36 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
37 $ hg verify -R copy
37 $ hg verify -R copy
38 checking changesets
38 checking changesets
39 checking manifests
39 checking manifests
40 crosschecking files in changesets and manifests
40 crosschecking files in changesets and manifests
41 checking files
41 checking files
42 checked 1 changesets with 4 changes to 4 files
42 checked 1 changesets with 4 changes to 4 files
43 #endif
43 #endif
44
44
45 try to clone via stream; it should fall back to pull instead
45 try to clone via stream; it should fall back to pull instead
46
46
47 $ hg clone --stream http://localhost:$HGPORT1/ copy2
47 $ hg clone --stream http://localhost:$HGPORT1/ copy2
48 warning: stream clone requested but server has them disabled
48 warning: stream clone requested but server has them disabled
49 requesting all changes
49 requesting all changes
50 adding changesets
50 adding changesets
51 adding manifests
51 adding manifests
52 adding file changes
52 adding file changes
53 added 1 changesets with 4 changes to 4 files
53 added 1 changesets with 4 changes to 4 files
54 new changesets 8b6053c928fe
54 new changesets 8b6053c928fe
55 updating to branch default
55 updating to branch default
56 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
57
57
58 try to clone via stream with a requirement missing on the client, so it should use pull instead
58 try to clone via stream with a requirement missing on the client, so it should use pull instead
59
59
60 $ cat > $TESTTMP/removesupportedformat.py << EOF
60 $ cat > $TESTTMP/removesupportedformat.py << EOF
61 > from mercurial import localrepo
61 > from mercurial import localrepo
62 > def extsetup(ui):
62 > def extsetup(ui):
63 > localrepo.localrepository.supportedformats.remove(b'generaldelta')
63 > localrepo.localrepository.supportedformats.remove(b'generaldelta')
64 > EOF
64 > EOF
65
65
66 $ hg clone --config extensions.rsf=$TESTTMP/removesupportedformat.py --stream http://localhost:$HGPORT/ copy3
66 $ hg clone --config extensions.rsf=$TESTTMP/removesupportedformat.py --stream http://localhost:$HGPORT/ copy3
67 warning: stream clone requested but client is missing requirements: generaldelta
67 warning: stream clone requested but client is missing requirements: generaldelta
68 (see https://www.mercurial-scm.org/wiki/MissingRequirement for more information)
68 (see https://www.mercurial-scm.org/wiki/MissingRequirement for more information)
69 requesting all changes
69 requesting all changes
70 adding changesets
70 adding changesets
71 adding manifests
71 adding manifests
72 adding file changes
72 adding file changes
73 added 1 changesets with 4 changes to 4 files
73 added 1 changesets with 4 changes to 4 files
74 new changesets 8b6053c928fe
74 new changesets 8b6053c928fe
75 updating to branch default
75 updating to branch default
76 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
77
77
78 clone via pull
78 clone via pull
79
79
80 $ hg clone http://localhost:$HGPORT1/ copy-pull
80 $ hg clone http://localhost:$HGPORT1/ copy-pull
81 requesting all changes
81 requesting all changes
82 adding changesets
82 adding changesets
83 adding manifests
83 adding manifests
84 adding file changes
84 adding file changes
85 added 1 changesets with 4 changes to 4 files
85 added 1 changesets with 4 changes to 4 files
86 new changesets 8b6053c928fe
86 new changesets 8b6053c928fe
87 updating to branch default
87 updating to branch default
88 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
89 $ hg verify -R copy-pull
89 $ hg verify -R copy-pull
90 checking changesets
90 checking changesets
91 checking manifests
91 checking manifests
92 crosschecking files in changesets and manifests
92 crosschecking files in changesets and manifests
93 checking files
93 checking files
94 checked 1 changesets with 4 changes to 4 files
94 checked 1 changesets with 4 changes to 4 files
95 $ cd test
95 $ cd test
96 $ echo bar > bar
96 $ echo bar > bar
97 $ hg commit -A -d '1 0' -m 2
97 $ hg commit -A -d '1 0' -m 2
98 adding bar
98 adding bar
99 $ cd ..
99 $ cd ..
100
100
101 clone over http with --update
101 clone over http with --update
102
102
103 $ hg clone http://localhost:$HGPORT1/ updated --update 0
103 $ hg clone http://localhost:$HGPORT1/ updated --update 0
104 requesting all changes
104 requesting all changes
105 adding changesets
105 adding changesets
106 adding manifests
106 adding manifests
107 adding file changes
107 adding file changes
108 added 2 changesets with 5 changes to 5 files
108 added 2 changesets with 5 changes to 5 files
109 new changesets 8b6053c928fe:5fed3813f7f5
109 new changesets 8b6053c928fe:5fed3813f7f5
110 updating to branch default
110 updating to branch default
111 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
112 $ hg log -r . -R updated
112 $ hg log -r . -R updated
113 changeset: 0:8b6053c928fe
113 changeset: 0:8b6053c928fe
114 user: test
114 user: test
115 date: Thu Jan 01 00:00:00 1970 +0000
115 date: Thu Jan 01 00:00:00 1970 +0000
116 summary: 1
116 summary: 1
117
117
118 $ rm -rf updated
118 $ rm -rf updated
119
119
120 incoming via HTTP
120 incoming via HTTP
121
121
122 $ hg clone http://localhost:$HGPORT1/ --rev 0 partial
122 $ hg clone http://localhost:$HGPORT1/ --rev 0 partial
123 adding changesets
123 adding changesets
124 adding manifests
124 adding manifests
125 adding file changes
125 adding file changes
126 added 1 changesets with 4 changes to 4 files
126 added 1 changesets with 4 changes to 4 files
127 new changesets 8b6053c928fe
127 new changesets 8b6053c928fe
128 updating to branch default
128 updating to branch default
129 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
129 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 $ cd partial
130 $ cd partial
131 $ touch LOCAL
131 $ touch LOCAL
132 $ hg ci -qAm LOCAL
132 $ hg ci -qAm LOCAL
133 $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n'
133 $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n'
134 comparing with http://localhost:$HGPORT1/
134 comparing with http://localhost:$HGPORT1/
135 searching for changes
135 searching for changes
136 2
136 2
137 $ cd ..
137 $ cd ..
138
138
139 pull
139 pull
140
140
141 $ cd copy-pull
141 $ cd copy-pull
142 $ cat >> .hg/hgrc <<EOF
142 $ cat >> .hg/hgrc <<EOF
143 > [hooks]
143 > [hooks]
144 > changegroup = sh -c "printenv.py --line changegroup"
144 > changegroup = sh -c "printenv.py --line changegroup"
145 > EOF
145 > EOF
146 $ hg pull
146 $ hg pull
147 pulling from http://localhost:$HGPORT1/
147 pulling from http://localhost:$HGPORT1/
148 searching for changes
148 searching for changes
149 adding changesets
149 adding changesets
150 adding manifests
150 adding manifests
151 adding file changes
151 adding file changes
152 added 1 changesets with 1 changes to 1 files
152 added 1 changesets with 1 changes to 1 files
153 new changesets 5fed3813f7f5
153 new changesets 5fed3813f7f5
154 changegroup hook: HG_HOOKNAME=changegroup
154 changegroup hook: HG_HOOKNAME=changegroup
155 HG_HOOKTYPE=changegroup
155 HG_HOOKTYPE=changegroup
156 HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
156 HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
157 HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
157 HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
158 HG_SOURCE=pull
158 HG_SOURCE=pull
159 HG_TXNID=TXN:$ID$
159 HG_TXNID=TXN:$ID$
160 HG_TXNNAME=pull
160 HG_TXNNAME=pull
161 http://localhost:$HGPORT1/
161 http://localhost:$HGPORT1/
162 HG_URL=http://localhost:$HGPORT1/
162 HG_URL=http://localhost:$HGPORT1/
163
163
164 (run 'hg update' to get a working copy)
164 (run 'hg update' to get a working copy)
165 $ cd ..
165 $ cd ..
166
166
167 clone from invalid URL
167 clone from invalid URL
168
168
169 $ hg clone http://localhost:$HGPORT/bad
169 $ hg clone http://localhost:$HGPORT/bad
170 abort: HTTP Error 404: Not Found
170 abort: HTTP Error 404: Not Found
171 [100]
171 [100]
172
172
173 test http authentication
173 test http authentication
174 + use the same server to test server-side streaming preference
174 + use the same server to test server-side streaming preference
175
175
176 $ cd test
176 $ cd test
177
177
178 $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -p $HGPORT2 -d \
178 $ hg serve --config extensions.x=$TESTDIR/httpserverauth.py -p $HGPORT2 -d \
179 > --pid-file=pid --config server.preferuncompressed=True -E ../errors2.log \
179 > --pid-file=pid --config server.preferuncompressed=True -E ../errors2.log \
180 > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
180 > --config web.push_ssl=False --config web.allow_push=* -A ../access.log
181 $ cat pid >> $DAEMON_PIDS
181 $ cat pid >> $DAEMON_PIDS
182
182
183 $ cat << EOF > get_pass.py
183 $ cat << EOF > get_pass.py
184 > import getpass
184 > import getpass
185 > def newgetpass(arg):
185 > def newgetpass(arg):
186 > return "pass"
186 > return "pass"
187 > getpass.getpass = newgetpass
187 > getpass.getpass = newgetpass
188 > EOF
188 > EOF
189
189
190 $ hg id http://localhost:$HGPORT2/
190 $ hg id http://localhost:$HGPORT2/
191 abort: http authorization required for http://localhost:$HGPORT2/
191 abort: http authorization required for http://localhost:$HGPORT2/
192 [255]
192 [255]
193 $ hg id http://localhost:$HGPORT2/
193 $ hg id http://localhost:$HGPORT2/
194 abort: http authorization required for http://localhost:$HGPORT2/
194 abort: http authorization required for http://localhost:$HGPORT2/
195 [255]
195 [255]
196 $ hg id --config ui.interactive=true --debug http://localhost:$HGPORT2/
196 $ hg id --config ui.interactive=true --debug http://localhost:$HGPORT2/
197 using http://localhost:$HGPORT2/
197 using http://localhost:$HGPORT2/
198 sending capabilities command
198 sending capabilities command
199 http authorization required for http://localhost:$HGPORT2/
199 http authorization required for http://localhost:$HGPORT2/
200 realm: mercurial
200 realm: mercurial
201 user: abort: response expected
201 user: abort: response expected
202 [255]
202 [255]
203 $ cat <<'EOF' | hg id --config ui.interactive=true --config ui.nontty=true --debug http://localhost:$HGPORT2/
203 $ cat <<'EOF' | hg id --config ui.interactive=true --config ui.nontty=true --debug http://localhost:$HGPORT2/
204 >
204 >
205 > EOF
205 > EOF
206 using http://localhost:$HGPORT2/
206 using http://localhost:$HGPORT2/
207 sending capabilities command
207 sending capabilities command
208 http authorization required for http://localhost:$HGPORT2/
208 http authorization required for http://localhost:$HGPORT2/
209 realm: mercurial
209 realm: mercurial
210 user:
210 user:
211 password: abort: response expected
211 password: abort: response expected
212 [255]
212 [255]
213 $ cat <<'EOF' | hg id --config ui.interactive=true --config ui.nontty=true --debug http://localhost:$HGPORT2/
213 $ cat <<'EOF' | hg id --config ui.interactive=true --config ui.nontty=true --debug http://localhost:$HGPORT2/
214 >
214 >
215 >
215 >
216 > EOF
216 > EOF
217 using http://localhost:$HGPORT2/
217 using http://localhost:$HGPORT2/
218 sending capabilities command
218 sending capabilities command
219 http authorization required for http://localhost:$HGPORT2/
219 http authorization required for http://localhost:$HGPORT2/
220 realm: mercurial
220 realm: mercurial
221 user:
221 user:
222 password: abort: authorization failed
222 password: abort: authorization failed
223 [255]
223 [255]
224 $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/
224 $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/
225 http authorization required for http://localhost:$HGPORT2/
225 http authorization required for http://localhost:$HGPORT2/
226 realm: mercurial
226 realm: mercurial
227 user: user
227 user: user
228 password: 5fed3813f7f5
228 password: 5fed3813f7f5
229 $ hg id http://user:pass@localhost:$HGPORT2/
229 $ hg id http://user:pass@localhost:$HGPORT2/
230 5fed3813f7f5
230 5fed3813f7f5
231 $ echo '[auth]' >> .hg/hgrc
231 $ echo '[auth]' >> .hg/hgrc
232 $ echo 'l.schemes=http' >> .hg/hgrc
232 $ echo 'l.schemes=http' >> .hg/hgrc
233 $ echo 'l.prefix=lo' >> .hg/hgrc
233 $ echo 'l.prefix=lo' >> .hg/hgrc
234 $ echo 'l.username=user' >> .hg/hgrc
234 $ echo 'l.username=user' >> .hg/hgrc
235 $ echo 'l.password=pass' >> .hg/hgrc
235 $ echo 'l.password=pass' >> .hg/hgrc
236 $ hg id http://localhost:$HGPORT2/
236 $ hg id http://localhost:$HGPORT2/
237 5fed3813f7f5
237 5fed3813f7f5
238 $ hg id http://localhost:$HGPORT2/
238 $ hg id http://localhost:$HGPORT2/
239 5fed3813f7f5
239 5fed3813f7f5
240 $ hg id http://user@localhost:$HGPORT2/
240 $ hg id http://user@localhost:$HGPORT2/
241 5fed3813f7f5
241 5fed3813f7f5
242
242
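(Aside: the five `echo` lines above amount to the following `[auth]` block; this is just a consolidated sketch, and `l` is an arbitrary group name.)

  [auth]
  l.schemes=http
  l.prefix=lo
  l.username=user
  l.password=pass

Mercurial picks the auth group whose `prefix` matches the URL being contacted (here `lo` is a prefix of `localhost:$HGPORT2/`), restricted to the listed `schemes`, and supplies the stored credentials, which is why the `hg id` calls above succeed without prompting.
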
243 $ cat > use_digests.py << EOF
243 $ cat > use_digests.py << EOF
244 > from mercurial import (
244 > from mercurial import (
245 > exthelper,
245 > exthelper,
246 > url,
246 > url,
247 > )
247 > )
248 >
248 >
249 > eh = exthelper.exthelper()
249 > eh = exthelper.exthelper()
250 > uisetup = eh.finaluisetup
250 > uisetup = eh.finaluisetup
251 >
251 >
252 > @eh.wrapfunction(url, 'opener')
252 > @eh.wrapfunction(url, 'opener')
253 > def urlopener(orig, *args, **kwargs):
253 > def urlopener(orig, *args, **kwargs):
254 > opener = orig(*args, **kwargs)
254 > opener = orig(*args, **kwargs)
255 > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest'))
255 > opener.addheaders.append((r'X-HgTest-AuthType', r'Digest'))
256 > return opener
256 > return opener
257 > EOF
257 > EOF
258
258
259 $ hg id http://localhost:$HGPORT2/ --config extensions.x=use_digests.py
259 $ hg id http://localhost:$HGPORT2/ --config extensions.x=use_digests.py
260 5fed3813f7f5
260 5fed3813f7f5
261
261
262 #if no-reposimplestore
262 #if no-reposimplestore
263 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
263 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
264 streaming all changes
264 streaming all changes
265 10 files to transfer, 1.01 KB of data
265 10 files to transfer, 1.01 KB of data
266 transferred * KB in * seconds (*/sec) (glob)
266 transferred * KB in * seconds (*/sec) (glob)
267 updating to branch default
267 updating to branch default
268 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
268 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
269 #endif
269 #endif
270
270
271 --pull should override server's preferuncompressed
271 --pull should override server's preferuncompressed
272 $ hg clone --pull http://user:pass@localhost:$HGPORT2/ dest-pull 2>&1
272 $ hg clone --pull http://user:pass@localhost:$HGPORT2/ dest-pull 2>&1
273 requesting all changes
273 requesting all changes
274 adding changesets
274 adding changesets
275 adding manifests
275 adding manifests
276 adding file changes
276 adding file changes
277 added 2 changesets with 5 changes to 5 files
277 added 2 changesets with 5 changes to 5 files
278 new changesets 8b6053c928fe:5fed3813f7f5
278 new changesets 8b6053c928fe:5fed3813f7f5
279 updating to branch default
279 updating to branch default
280 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
280 5 files updated, 0 files merged, 0 files removed, 0 files unresolved
281
281
282 $ hg id http://user2@localhost:$HGPORT2/
282 $ hg id http://user2@localhost:$HGPORT2/
283 abort: http authorization required for http://localhost:$HGPORT2/
283 abort: http authorization required for http://localhost:$HGPORT2/
284 [255]
284 [255]
285 $ hg id http://user:pass2@localhost:$HGPORT2/
285 $ hg id http://user:pass2@localhost:$HGPORT2/
286 abort: HTTP Error 403: no
286 abort: HTTP Error 403: no
287 [100]
287 [100]
288
288
289 $ hg -R dest-pull tag -r tip top
289 $ hg -R dest-pull tag -r tip top
290 $ hg -R dest-pull push http://user:pass@localhost:$HGPORT2/
290 $ hg -R dest-pull push http://user:pass@localhost:$HGPORT2/
291 pushing to http://user:***@localhost:$HGPORT2/
291 pushing to http://user:***@localhost:$HGPORT2/
292 searching for changes
292 searching for changes
293 remote: adding changesets
293 remote: adding changesets
294 remote: adding manifests
294 remote: adding manifests
295 remote: adding file changes
295 remote: adding file changes
296 remote: added 1 changesets with 1 changes to 1 files
296 remote: added 1 changesets with 1 changes to 1 files
297 $ hg rollback -q
297 $ hg rollback -q
298 $ hg -R dest-pull push http://user:pass@localhost:$HGPORT2/ --debug --config devel.debug.peer-request=yes
298 $ hg -R dest-pull push http://user:pass@localhost:$HGPORT2/ --debug --config devel.debug.peer-request=yes
299 pushing to http://user:***@localhost:$HGPORT2/
299 pushing to http://user:***@localhost:$HGPORT2/
300 using http://localhost:$HGPORT2/
300 using http://localhost:$HGPORT2/
301 http auth: user user, password ****
301 http auth: user user, password ****
302 sending capabilities command
302 sending capabilities command
303 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=capabilities
303 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=capabilities
304 http auth: user user, password ****
304 http auth: user user, password ****
305 devel-peer-request: finished in *.???? seconds (200) (glob)
305 devel-peer-request: finished in *.???? seconds (200) (glob)
306 query 1; heads
306 query 1; heads
307 devel-peer-request: batched-content
307 devel-peer-request: batched-content
308 devel-peer-request: - heads (0 arguments)
308 devel-peer-request: - heads (0 arguments)
309 devel-peer-request: - known (1 arguments)
309 devel-peer-request: - known (1 arguments)
310 sending batch command
310 sending batch command
311 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=batch
311 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=batch
312 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
312 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
313 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
313 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
314 devel-peer-request: 68 bytes of commands arguments in headers
314 devel-peer-request: 68 bytes of commands arguments in headers
315 devel-peer-request: finished in *.???? seconds (200) (glob)
315 devel-peer-request: finished in *.???? seconds (200) (glob)
316 searching for changes
316 searching for changes
317 all remote heads known locally
317 all remote heads known locally
318 preparing listkeys for "phases"
318 preparing listkeys for "phases"
319 sending listkeys command
319 sending listkeys command
320 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
320 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
321 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
321 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
322 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
322 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
323 devel-peer-request: 16 bytes of commands arguments in headers
323 devel-peer-request: 16 bytes of commands arguments in headers
324 devel-peer-request: finished in *.???? seconds (200) (glob)
324 devel-peer-request: finished in *.???? seconds (200) (glob)
325 received listkey for "phases": 58 bytes
325 received listkey for "phases": 58 bytes
326 checking for updated bookmarks
326 checking for updated bookmarks
327 preparing listkeys for "bookmarks"
327 preparing listkeys for "bookmarks"
328 sending listkeys command
328 sending listkeys command
329 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
329 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
330 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
330 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
331 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
331 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
332 devel-peer-request: 19 bytes of commands arguments in headers
332 devel-peer-request: 19 bytes of commands arguments in headers
333 devel-peer-request: finished in *.???? seconds (200) (glob)
333 devel-peer-request: finished in *.???? seconds (200) (glob)
334 received listkey for "bookmarks": 0 bytes
334 received listkey for "bookmarks": 0 bytes
335 sending branchmap command
335 sending branchmap command
336 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=branchmap
336 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=branchmap
337 devel-peer-request: Vary X-HgProto-1
337 devel-peer-request: Vary X-HgProto-1
338 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
338 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
339 devel-peer-request: finished in *.???? seconds (200) (glob)
339 devel-peer-request: finished in *.???? seconds (200) (glob)
340 preparing listkeys for "bookmarks"
340 preparing listkeys for "bookmarks"
341 sending listkeys command
341 sending listkeys command
342 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
342 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
343 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
343 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
344 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
344 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
345 devel-peer-request: 19 bytes of commands arguments in headers
345 devel-peer-request: 19 bytes of commands arguments in headers
346 devel-peer-request: finished in *.???? seconds (200) (glob)
346 devel-peer-request: finished in *.???? seconds (200) (glob)
347 received listkey for "bookmarks": 0 bytes
347 received listkey for "bookmarks": 0 bytes
348 1 changesets found
348 1 changesets found
349 list of changesets:
349 list of changesets:
350 7f4e523d01f2cc3765ac8934da3d14db775ff872
350 7f4e523d01f2cc3765ac8934da3d14db775ff872
351 bundle2-output-bundle: "HG20", 5 parts total
351 bundle2-output-bundle: "HG20", 5 parts total
352 bundle2-output-part: "replycaps" 207 bytes payload
352 bundle2-output-part: "replycaps" 207 bytes payload
353 bundle2-output-part: "check:phases" 24 bytes payload
353 bundle2-output-part: "check:phases" 24 bytes payload
354 bundle2-output-part: "check:updated-heads" streamed payload
354 bundle2-output-part: "check:updated-heads" streamed payload
355 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
355 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
356 bundle2-output-part: "phase-heads" 24 bytes payload
356 bundle2-output-part: "phase-heads" 24 bytes payload
357 sending unbundle command
357 sending unbundle command
358 sending 1023 bytes
358 sending 1023 bytes
359 devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle
359 devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle
360 devel-peer-request: Content-length 1023
360 devel-peer-request: Content-length 1023
361 devel-peer-request: Content-type application/mercurial-0.1
361 devel-peer-request: Content-type application/mercurial-0.1
362 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
362 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
363 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
363 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
364 devel-peer-request: 16 bytes of commands arguments in headers
364 devel-peer-request: 16 bytes of commands arguments in headers
365 devel-peer-request: 1023 bytes of data
365 devel-peer-request: 1023 bytes of data
366 devel-peer-request: finished in *.???? seconds (200) (glob)
366 devel-peer-request: finished in *.???? seconds (200) (glob)
367 bundle2-input-bundle: no-transaction
367 bundle2-input-bundle: no-transaction
368 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
368 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
369 bundle2-input-part: "output" (advisory) (params: 0 advisory) supported
369 bundle2-input-part: "output" (advisory) (params: 0 advisory) supported
370 bundle2-input-part: total payload size 55
370 bundle2-input-part: total payload size 55
371 remote: adding changesets
371 remote: adding changesets
372 remote: adding manifests
372 remote: adding manifests
373 remote: adding file changes
373 remote: adding file changes
374 bundle2-input-part: "output" (advisory) supported
374 bundle2-input-part: "output" (advisory) supported
375 bundle2-input-part: total payload size 45
375 bundle2-input-part: total payload size 45
376 remote: added 1 changesets with 1 changes to 1 files
376 remote: added 1 changesets with 1 changes to 1 files
377 bundle2-input-bundle: 3 parts total
377 bundle2-input-bundle: 3 parts total
378 preparing listkeys for "phases"
378 preparing listkeys for "phases"
379 sending listkeys command
379 sending listkeys command
380 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
380 devel-peer-request: GET http://localhost:$HGPORT2/?cmd=listkeys
381 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
381 devel-peer-request: Vary X-HgArg-1,X-HgProto-1
382 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
382 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
383 devel-peer-request: 16 bytes of commands arguments in headers
383 devel-peer-request: 16 bytes of commands arguments in headers
384 devel-peer-request: finished in *.???? seconds (200) (glob)
384 devel-peer-request: finished in *.???? seconds (200) (glob)
385 received listkey for "phases": 15 bytes
385 received listkey for "phases": 15 bytes
386 (sent 9 HTTP requests and * bytes; received * bytes in responses) (glob) (?)
386 (sent 9 HTTP requests and * bytes; received * bytes in responses) (glob) (?)
387 $ hg rollback -q
387 $ hg rollback -q
388
388
389 $ sed 's/.*] "/"/' < ../access.log
389 $ sed 's/.*] "/"/' < ../access.log
390 "GET /?cmd=capabilities HTTP/1.1" 401 -
390 "GET /?cmd=capabilities HTTP/1.1" 401 -
391 "GET /?cmd=capabilities HTTP/1.1" 401 -
391 "GET /?cmd=capabilities HTTP/1.1" 401 -
392 "GET /?cmd=capabilities HTTP/1.1" 401 -
392 "GET /?cmd=capabilities HTTP/1.1" 401 -
393 "GET /?cmd=capabilities HTTP/1.1" 401 -
393 "GET /?cmd=capabilities HTTP/1.1" 401 -
394 "GET /?cmd=capabilities HTTP/1.1" 401 -
394 "GET /?cmd=capabilities HTTP/1.1" 401 -
395 "GET /?cmd=capabilities HTTP/1.1" 401 -
395 "GET /?cmd=capabilities HTTP/1.1" 401 -
396 "GET /?cmd=capabilities HTTP/1.1" 200 -
396 "GET /?cmd=capabilities HTTP/1.1" 200 -
397 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
397 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
398 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
398 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
399 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
399 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
400 "GET /?cmd=capabilities HTTP/1.1" 401 -
400 "GET /?cmd=capabilities HTTP/1.1" 401 -
401 "GET /?cmd=capabilities HTTP/1.1" 200 -
401 "GET /?cmd=capabilities HTTP/1.1" 200 -
402 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
402 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
403 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
403 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
404 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
404 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
405 "GET /?cmd=capabilities HTTP/1.1" 401 -
405 "GET /?cmd=capabilities HTTP/1.1" 401 -
406 "GET /?cmd=capabilities HTTP/1.1" 200 -
406 "GET /?cmd=capabilities HTTP/1.1" 200 -
407 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
407 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
408 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
408 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
409 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
409 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
410 "GET /?cmd=capabilities HTTP/1.1" 401 -
410 "GET /?cmd=capabilities HTTP/1.1" 401 -
411 "GET /?cmd=capabilities HTTP/1.1" 200 -
411 "GET /?cmd=capabilities HTTP/1.1" 200 -
412 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
412 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
413 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
413 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
414 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
414 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
415 "GET /?cmd=capabilities HTTP/1.1" 401 -
415 "GET /?cmd=capabilities HTTP/1.1" 401 -
416 "GET /?cmd=capabilities HTTP/1.1" 200 -
416 "GET /?cmd=capabilities HTTP/1.1" 200 -
417 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
417 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
418 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
418 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
419 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
419 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
420 "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest
420 "GET /?cmd=capabilities HTTP/1.1" 401 - x-hgtest-authtype:Digest
421 "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest
421 "GET /?cmd=capabilities HTTP/1.1" 200 - x-hgtest-authtype:Digest
422 "GET /?cmd=lookup HTTP/1.1" 401 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
422 "GET /?cmd=lookup HTTP/1.1" 401 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
423 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
423 "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
424 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
424 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
425 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
425 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
426 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
426 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
427 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
427 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull x-hgtest-authtype:Digest
428 "GET /?cmd=capabilities HTTP/1.1" 401 - (no-reposimplestore !)
428 "GET /?cmd=capabilities HTTP/1.1" 401 - (no-reposimplestore !)
429 "GET /?cmd=capabilities HTTP/1.1" 200 - (no-reposimplestore !)
429 "GET /?cmd=capabilities HTTP/1.1" 200 - (no-reposimplestore !)
430 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (no-reposimplestore !)
430 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (no-reposimplestore !)
431 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&stream=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (no-reposimplestore !)
431 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&stream=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (no-reposimplestore !)
432 "GET /?cmd=capabilities HTTP/1.1" 401 - (no-reposimplestore !)
432 "GET /?cmd=capabilities HTTP/1.1" 401 - (no-reposimplestore !)
433 "GET /?cmd=capabilities HTTP/1.1" 200 - (no-reposimplestore !)
433 "GET /?cmd=capabilities HTTP/1.1" 200 - (no-reposimplestore !)
434 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
434 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
435 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
435 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
436 "GET /?cmd=capabilities HTTP/1.1" 401 -
436 "GET /?cmd=capabilities HTTP/1.1" 401 -
437 "GET /?cmd=capabilities HTTP/1.1" 401 -
437 "GET /?cmd=capabilities HTTP/1.1" 401 -
438 "GET /?cmd=capabilities HTTP/1.1" 403 -
438 "GET /?cmd=capabilities HTTP/1.1" 403 -
439 "GET /?cmd=capabilities HTTP/1.1" 401 -
439 "GET /?cmd=capabilities HTTP/1.1" 401 -
440 "GET /?cmd=capabilities HTTP/1.1" 200 -
440 "GET /?cmd=capabilities HTTP/1.1" 200 -
441 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
441 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
442 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
442 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
443 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
443 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
444 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
444 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
445 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
445 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
446 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365* (glob)
446 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365* (glob)
447 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
447 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
448 "GET /?cmd=capabilities HTTP/1.1" 401 -
448 "GET /?cmd=capabilities HTTP/1.1" 401 -
449 "GET /?cmd=capabilities HTTP/1.1" 200 -
449 "GET /?cmd=capabilities HTTP/1.1" 200 -
450 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
450 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
451 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
451 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
452 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
452 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
453 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
453 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
454 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
454 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
455 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
455 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
456 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
456 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
457
457
458 $ cd ..
458 $ cd ..
459
459
460 clone from a server with the repo at the root and an unserved subrepo (issue2970)
460 clone from a server with the repo at the root and an unserved subrepo (issue2970)
461
461
462 $ hg --cwd test init sub
462 $ hg --cwd test init sub
463 $ echo empty > test/sub/empty
463 $ echo empty > test/sub/empty
464 $ hg --cwd test/sub add empty
464 $ hg --cwd test/sub add empty
465 $ hg --cwd test/sub commit -qm 'add empty'
465 $ hg --cwd test/sub commit -qm 'add empty'
466 $ hg --cwd test/sub tag -r 0 something
466 $ hg --cwd test/sub tag -r 0 something
467 $ echo sub = sub > test/.hgsub
467 $ echo sub = sub > test/.hgsub
468 $ hg --cwd test add .hgsub
468 $ hg --cwd test add .hgsub
469 $ hg --cwd test commit -qm 'add subrepo'
469 $ hg --cwd test commit -qm 'add subrepo'
470 $ hg clone http://localhost:$HGPORT noslash-clone
470 $ hg clone http://localhost:$HGPORT noslash-clone
471 requesting all changes
471 requesting all changes
472 adding changesets
472 adding changesets
473 adding manifests
473 adding manifests
474 adding file changes
474 adding file changes
475 added 3 changesets with 7 changes to 7 files
475 added 3 changesets with 7 changes to 7 files
476 new changesets 8b6053c928fe:56f9bc90cce6
476 new changesets 8b6053c928fe:56f9bc90cce6
477 updating to branch default
477 updating to branch default
478 cloning subrepo sub from http://localhost:$HGPORT/sub
478 cloning subrepo sub from http://localhost:$HGPORT/sub
479 abort: HTTP Error 404: Not Found
479 abort: HTTP Error 404: Not Found
480 [100]
480 [100]
481 $ hg clone http://localhost:$HGPORT/ slash-clone
481 $ hg clone http://localhost:$HGPORT/ slash-clone
482 requesting all changes
482 requesting all changes
483 adding changesets
483 adding changesets
484 adding manifests
484 adding manifests
485 adding file changes
485 adding file changes
486 added 3 changesets with 7 changes to 7 files
486 added 3 changesets with 7 changes to 7 files
487 new changesets 8b6053c928fe:56f9bc90cce6
487 new changesets 8b6053c928fe:56f9bc90cce6
488 updating to branch default
488 updating to branch default
489 cloning subrepo sub from http://localhost:$HGPORT/sub
489 cloning subrepo sub from http://localhost:$HGPORT/sub
490 abort: HTTP Error 404: Not Found
490 abort: HTTP Error 404: Not Found
491 [100]
491 [100]
492
492
493 check error log
493 check error log
494
494
495 $ cat error.log
495 $ cat error.log
496
496
497 $ cat errors2.log
497 $ cat errors2.log
498
498
499 check abort error reporting while pulling/cloning
499 check abort error reporting while pulling/cloning
500
500
501 $ $RUNTESTDIR/killdaemons.py
501 $ $RUNTESTDIR/killdaemons.py
502 $ hg serve -R test -p $HGPORT -d --pid-file=hg3.pid -E error.log --config extensions.crash=${TESTDIR}/crashgetbundler.py
502 $ hg serve -R test -p $HGPORT -d --pid-file=hg3.pid -E error.log --config extensions.crash=${TESTDIR}/crashgetbundler.py
503 $ cat hg3.pid >> $DAEMON_PIDS
503 $ cat hg3.pid >> $DAEMON_PIDS
504 $ hg clone http://localhost:$HGPORT/ abort-clone
504 $ hg clone http://localhost:$HGPORT/ abort-clone
505 requesting all changes
505 requesting all changes
506 remote: abort: this is an exercise
506 remote: abort: this is an exercise
507 abort: pull failed on remote
507 abort: pull failed on remote
508 [255]
508 [100]
509 $ cat error.log
509 $ cat error.log
510
510
511 disable pull-based clones
511 disable pull-based clones
512
512
513 $ hg serve -R test -p $HGPORT1 -d --pid-file=hg4.pid -E error.log --config server.disablefullbundle=True
513 $ hg serve -R test -p $HGPORT1 -d --pid-file=hg4.pid -E error.log --config server.disablefullbundle=True
514 $ cat hg4.pid >> $DAEMON_PIDS
514 $ cat hg4.pid >> $DAEMON_PIDS
515 $ hg clone http://localhost:$HGPORT1/ disable-pull-clone
515 $ hg clone http://localhost:$HGPORT1/ disable-pull-clone
516 requesting all changes
516 requesting all changes
517 remote: abort: server has pull-based clones disabled
517 remote: abort: server has pull-based clones disabled
518 abort: pull failed on remote
518 abort: pull failed on remote
519 (remove --pull if specified or upgrade Mercurial)
519 (remove --pull if specified or upgrade Mercurial)
520 [255]
520 [100]
521
521
522 #if no-reposimplestore
522 #if no-reposimplestore
523 ... but keep stream clones working
523 ... but keep stream clones working
524
524
525 $ hg clone --stream --noupdate http://localhost:$HGPORT1/ test-stream-clone
525 $ hg clone --stream --noupdate http://localhost:$HGPORT1/ test-stream-clone
526 streaming all changes
526 streaming all changes
527 * files to transfer, * of data (glob)
527 * files to transfer, * of data (glob)
528 transferred * in * seconds (*/sec) (glob)
528 transferred * in * seconds (*/sec) (glob)
529 $ cat error.log
529 $ cat error.log
530 #endif
530 #endif
531
531
532 ... and also keep partial clones and pulls working
532 ... and also keep partial clones and pulls working
533 $ hg clone http://localhost:$HGPORT1 --rev 0 test/partial/clone
533 $ hg clone http://localhost:$HGPORT1 --rev 0 test/partial/clone
534 adding changesets
534 adding changesets
535 adding manifests
535 adding manifests
536 adding file changes
536 adding file changes
537 added 1 changesets with 4 changes to 4 files
537 added 1 changesets with 4 changes to 4 files
538 new changesets 8b6053c928fe
538 new changesets 8b6053c928fe
539 updating to branch default
539 updating to branch default
540 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
540 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
541 $ hg pull -R test/partial/clone
541 $ hg pull -R test/partial/clone
542 pulling from http://localhost:$HGPORT1/
542 pulling from http://localhost:$HGPORT1/
543 searching for changes
543 searching for changes
544 adding changesets
544 adding changesets
545 adding manifests
545 adding manifests
546 adding file changes
546 adding file changes
547 added 2 changesets with 3 changes to 3 files
547 added 2 changesets with 3 changes to 3 files
548 new changesets 5fed3813f7f5:56f9bc90cce6
548 new changesets 5fed3813f7f5:56f9bc90cce6
549 (run 'hg update' to get a working copy)
549 (run 'hg update' to get a working copy)
550
550
551 $ hg clone -U -r 0 test/partial/clone test/another/clone
551 $ hg clone -U -r 0 test/partial/clone test/another/clone
552 adding changesets
552 adding changesets
553 adding manifests
553 adding manifests
554 adding file changes
554 adding file changes
555 added 1 changesets with 4 changes to 4 files
555 added 1 changesets with 4 changes to 4 files
556 new changesets 8b6053c928fe
556 new changesets 8b6053c928fe
557
557
558 corrupt cookies file should yield a warning
558 corrupt cookies file should yield a warning
559
559
560 $ cat > $TESTTMP/cookies.txt << EOF
560 $ cat > $TESTTMP/cookies.txt << EOF
561 > bad format
561 > bad format
562 > EOF
562 > EOF
563
563
564 $ hg --config auth.cookiefile=$TESTTMP/cookies.txt id http://localhost:$HGPORT/
564 $ hg --config auth.cookiefile=$TESTTMP/cookies.txt id http://localhost:$HGPORT/
565 (error loading cookie file $TESTTMP/cookies.txt: '*/cookies.txt' does not look like a Netscape format cookies file; continuing without cookies) (glob)
565 (error loading cookie file $TESTTMP/cookies.txt: '*/cookies.txt' does not look like a Netscape format cookies file; continuing without cookies) (glob)
566 56f9bc90cce6
566 56f9bc90cce6
567
567
568 $ killdaemons.py
568 $ killdaemons.py
569
569
570 Create a dummy authentication handler that looks for cookies. It doesn't do anything
570 Create a dummy authentication handler that looks for cookies. It doesn't do anything
571 useful. It just raises an HTTP 500 with details about the Cookie request header.
571 useful. It just raises an HTTP 500 with details about the Cookie request header.
572 We raise HTTP 500 because its message is printed in the abort message.
572 We raise HTTP 500 because its message is printed in the abort message.
573
573
574 $ cat > cookieauth.py << EOF
574 $ cat > cookieauth.py << EOF
575 > from mercurial import util
575 > from mercurial import util
576 > from mercurial.hgweb import common
576 > from mercurial.hgweb import common
577 > def perform_authentication(hgweb, req, op):
577 > def perform_authentication(hgweb, req, op):
578 > cookie = req.headers.get(b'Cookie')
578 > cookie = req.headers.get(b'Cookie')
579 > if not cookie:
579 > if not cookie:
580 > raise common.ErrorResponse(common.HTTP_SERVER_ERROR, b'no-cookie')
580 > raise common.ErrorResponse(common.HTTP_SERVER_ERROR, b'no-cookie')
581 > raise common.ErrorResponse(common.HTTP_SERVER_ERROR, b'Cookie: %s' % cookie)
581 > raise common.ErrorResponse(common.HTTP_SERVER_ERROR, b'Cookie: %s' % cookie)
582 > def extsetup(ui):
582 > def extsetup(ui):
583 > common.permhooks.insert(0, perform_authentication)
583 > common.permhooks.insert(0, perform_authentication)
584 > EOF
584 > EOF
585
585
586 $ hg serve --config extensions.cookieauth=cookieauth.py -R test -p $HGPORT -d --pid-file=pid
586 $ hg serve --config extensions.cookieauth=cookieauth.py -R test -p $HGPORT -d --pid-file=pid
587 $ cat pid > $DAEMON_PIDS
587 $ cat pid > $DAEMON_PIDS
588
588
589 A request sent without a cookie should fail due to the lack of a cookie
589 A request sent without a cookie should fail due to the lack of a cookie
590
590
591 $ hg id http://localhost:$HGPORT
591 $ hg id http://localhost:$HGPORT
592 abort: HTTP Error 500: no-cookie
592 abort: HTTP Error 500: no-cookie
593 [100]
593 [100]
594
594
595 Populate a cookies file
595 Populate a cookies file
596
596
597 $ cat > cookies.txt << EOF
597 $ cat > cookies.txt << EOF
598 > # HTTP Cookie File
598 > # HTTP Cookie File
599 > # Expiration is 2030-01-01 at midnight
599 > # Expiration is 2030-01-01 at midnight
600 > .example.com TRUE / FALSE 1893456000 hgkey examplevalue
600 > .example.com TRUE / FALSE 1893456000 hgkey examplevalue
601 > EOF
601 > EOF
602
602
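(For reference, each non-comment line in a Netscape-format cookie file carries seven tab-separated fields; a purely illustrative annotation of the entry above:)

  # domain        subdomains  path  secure  expires      name   value
  .example.com    TRUE        /     FALSE   1893456000   hgkey  examplevalue

The domain field is what the next check exercises: a cookie scoped to `.example.com` must not be offered to `localhost`.
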
603 Should not send a cookie for another domain
603 Should not send a cookie for another domain
604
604
605 $ hg --config auth.cookiefile=cookies.txt id http://localhost:$HGPORT/
605 $ hg --config auth.cookiefile=cookies.txt id http://localhost:$HGPORT/
606 abort: HTTP Error 500: no-cookie
606 abort: HTTP Error 500: no-cookie
607 [100]
607 [100]
608
608
609 Add a cookie entry for our test server and verify it is sent
609 Add a cookie entry for our test server and verify it is sent
610
610
611 $ cat >> cookies.txt << EOF
611 $ cat >> cookies.txt << EOF
612 > localhost.local FALSE / FALSE 1893456000 hgkey localhostvalue
612 > localhost.local FALSE / FALSE 1893456000 hgkey localhostvalue
613 > EOF
613 > EOF
614
614
615 $ hg --config auth.cookiefile=cookies.txt id http://localhost:$HGPORT/
615 $ hg --config auth.cookiefile=cookies.txt id http://localhost:$HGPORT/
616 abort: HTTP Error 500: Cookie: hgkey=localhostvalue
616 abort: HTTP Error 500: Cookie: hgkey=localhostvalue
617 [100]
617 [100]
@@ -1,694 +1,694 b''
1 #testcases lfsremote-on lfsremote-off
1 #testcases lfsremote-on lfsremote-off
2 #require serve no-reposimplestore no-chg
2 #require serve no-reposimplestore no-chg
3
3
4 This test splits `hg serve` with and without using the extension into separate
4 This test splits `hg serve` with and without using the extension into separate
5 test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
5 test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
6 indicates whether or not there are commits that use an LFS file, and "D"/"E"
6 indicates whether or not there are commits that use an LFS file, and "D"/"E"
7 indicates whether or not the extension is loaded. The "X" cases are not tested
7 indicates whether or not the extension is loaded. The "X" cases are not tested
8 individually, because the lfs requirement causes the process to bail early if
8 individually, because the lfs requirement causes the process to bail early if
9 the extension is disabled.
9 the extension is disabled.
10
10
11 . Server
11 . Server
12 .
12 .
13 . No-LFS LFS
13 . No-LFS LFS
14 . +----------------------------+
14 . +----------------------------+
15 . | || D | E | D | E |
15 . | || D | E | D | E |
16 . |---++=======================|
16 . |---++=======================|
17 . C | D || N/A | #1 | X | #4 |
17 . C | D || N/A | #1 | X | #4 |
18 . l No +---++-----------------------|
18 . l No +---++-----------------------|
19 . i LFS | E || #2 | #2 | X | #5 |
19 . i LFS | E || #2 | #2 | X | #5 |
20 . e +---++-----------------------|
20 . e +---++-----------------------|
21 . n | D || X | X | X | X |
21 . n | D || X | X | X | X |
22 . t LFS |---++-----------------------|
22 . t LFS |---++-----------------------|
23 . | E || #3 | #3 | X | #6 |
23 . | E || #3 | #3 | X | #6 |
24 . |---++-----------------------+
24 . |---++-----------------------+
25
25
26 make command server magic visible
26 make command server magic visible
27
27
28 #if windows
28 #if windows
29 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
29 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
30 #else
30 #else
31 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
31 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
32 #endif
32 #endif
33 $ export PYTHONPATH
33 $ export PYTHONPATH
34
34
35 $ hg init server
35 $ hg init server
36 $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
36 $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
37
37
38 $ cat > $TESTTMP/debugprocessors.py <<EOF
38 $ cat > $TESTTMP/debugprocessors.py <<EOF
39 > from mercurial import (
39 > from mercurial import (
40 > cmdutil,
40 > cmdutil,
41 > commands,
41 > commands,
42 > pycompat,
42 > pycompat,
43 > registrar,
43 > registrar,
44 > )
44 > )
45 > cmdtable = {}
45 > cmdtable = {}
46 > command = registrar.command(cmdtable)
46 > command = registrar.command(cmdtable)
47 > @command(b'debugprocessors', [], b'FILE')
47 > @command(b'debugprocessors', [], b'FILE')
48 > def debugprocessors(ui, repo, file_=None, **opts):
48 > def debugprocessors(ui, repo, file_=None, **opts):
49 > opts = pycompat.byteskwargs(opts)
49 > opts = pycompat.byteskwargs(opts)
50 > opts[b'changelog'] = False
50 > opts[b'changelog'] = False
51 > opts[b'manifest'] = False
51 > opts[b'manifest'] = False
52 > opts[b'dir'] = False
52 > opts[b'dir'] = False
53 > rl = cmdutil.openrevlog(repo, b'debugprocessors', file_, opts)
53 > rl = cmdutil.openrevlog(repo, b'debugprocessors', file_, opts)
54 > for flag, proc in rl._flagprocessors.items():
54 > for flag, proc in rl._flagprocessors.items():
55 > ui.status(b"registered processor '%#x'\n" % (flag))
55 > ui.status(b"registered processor '%#x'\n" % (flag))
56 > EOF
56 > EOF
57
57
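(A minimal sketch of how this helper is used; the file name is hypothetical, and for an lfs-tracked file the listing would include the 0x2000 flag that the lfs extension registers, possibly alongside other built-in flag processors:)

  $ hg debugprocessors lfs.bin
  registered processor '0x2000'
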
58 Skip the experimental.changegroup3=True config. Failure to agree on this comes
58 Skip the experimental.changegroup3=True config. Failure to agree on this comes
59 first, and causes an "abort: no common changegroup version" if the extension is
59 first, and causes an "abort: no common changegroup version" if the extension is
60 only loaded on one side. If that *is* enabled, the subsequent failure is "abort:
60 only loaded on one side. If that *is* enabled, the subsequent failure is "abort:
61 missing processor for flag '0x2000'!" if the extension is only loaded on one side
61 missing processor for flag '0x2000'!" if the extension is only loaded on one side
62 (possibly also masked by the Internal Server Error message).
62 (possibly also masked by the Internal Server Error message).
63 $ cat >> $HGRCPATH <<EOF
63 $ cat >> $HGRCPATH <<EOF
64 > [extensions]
64 > [extensions]
65 > debugprocessors = $TESTTMP/debugprocessors.py
65 > debugprocessors = $TESTTMP/debugprocessors.py
66 > [experimental]
66 > [experimental]
67 > lfs.disableusercache = True
67 > lfs.disableusercache = True
68 > lfs.worker-enable = False
68 > lfs.worker-enable = False
69 > [lfs]
69 > [lfs]
70 > threshold=10
70 > threshold=10
71 > [web]
71 > [web]
72 > allow_push=*
72 > allow_push=*
73 > push_ssl=False
73 > push_ssl=False
74 > EOF
74 > EOF
75
75
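(Aside on `threshold=10`: files above the 10-byte threshold are stored as lfs blobs, which is why the `lfs.bin` committed further down introduces the `lfs` requirement while the small `nonlfs*.txt` files do not. A quick size check, purely illustrative:)

  $ printf 'this is a big lfs file\n' | wc -c
  23
  $ printf 'non-lfs\n' | wc -c
  8
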
76 $ cp $HGRCPATH $HGRCPATH.orig
76 $ cp $HGRCPATH $HGRCPATH.orig
77
77
78 #if lfsremote-on
78 #if lfsremote-on
79 $ hg --config extensions.lfs= -R server \
79 $ hg --config extensions.lfs= -R server \
80 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
80 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
81 #else
81 #else
82 $ hg --config extensions.lfs=! -R server \
82 $ hg --config extensions.lfs=! -R server \
83 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
83 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
84 #endif
84 #endif
85
85
86 $ cat hg.pid >> $DAEMON_PIDS
86 $ cat hg.pid >> $DAEMON_PIDS
87 $ hg clone -q http://localhost:$HGPORT client
87 $ hg clone -q http://localhost:$HGPORT client
88 $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
88 $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
89 [1]
89 [1]
90
90
91 This trivial repo will force the commandserver to load the extension, but not call
91 This trivial repo will force the commandserver to load the extension, but not call
92 reposetup() on another repo actually being operated on. This gives coverage
92 reposetup() on another repo actually being operated on. This gives coverage
93 that wrapper functions do not assume reposetup() was called.
93 that wrapper functions do not assume reposetup() was called.
94
94
95 $ hg init $TESTTMP/cmdservelfs
95 $ hg init $TESTTMP/cmdservelfs
96 $ cat >> $TESTTMP/cmdservelfs/.hg/hgrc << EOF
96 $ cat >> $TESTTMP/cmdservelfs/.hg/hgrc << EOF
97 > [extensions]
97 > [extensions]
98 > lfs =
98 > lfs =
99 > EOF
99 > EOF
100
100
101 --------------------------------------------------------------------------------
101 --------------------------------------------------------------------------------
102 Case #1: client with non-lfs content and the extension disabled; server with
102 Case #1: client with non-lfs content and the extension disabled; server with
103 non-lfs content, and the extension enabled.
103 non-lfs content, and the extension enabled.
104
104
105 $ cd client
105 $ cd client
106 $ echo 'non-lfs' > nonlfs.txt
106 $ echo 'non-lfs' > nonlfs.txt
107 >>> from __future__ import absolute_import
107 >>> from __future__ import absolute_import
108 >>> from hgclient import check, readchannel, runcommand
108 >>> from hgclient import check, readchannel, runcommand
109 >>> @check
109 >>> @check
110 ... def diff(server):
110 ... def diff(server):
111 ... readchannel(server)
111 ... readchannel(server)
112 ... # run an arbitrary command in the repo with the extension loaded
112 ... # run an arbitrary command in the repo with the extension loaded
113 ... runcommand(server, [b'id', b'-R', b'../cmdservelfs'])
113 ... runcommand(server, [b'id', b'-R', b'../cmdservelfs'])
114 ... # now run a command in a repo without the extension to ensure that
114 ... # now run a command in a repo without the extension to ensure that
115 ... # files are added safely..
115 ... # files are added safely..
116 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
116 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
117 ... # .. and that scmutil.prefetchfiles() safely no-ops..
117 ... # .. and that scmutil.prefetchfiles() safely no-ops..
118 ... runcommand(server, [b'diff', b'-r', b'.~1'])
118 ... runcommand(server, [b'diff', b'-r', b'.~1'])
119 ... # .. and that debugupgraderepo safely no-ops.
119 ... # .. and that debugupgraderepo safely no-ops.
120 ... runcommand(server, [b'debugupgraderepo', b'-q', b'--run'])
120 ... runcommand(server, [b'debugupgraderepo', b'-q', b'--run'])
121 *** runcommand id -R ../cmdservelfs
121 *** runcommand id -R ../cmdservelfs
122 000000000000 tip
122 000000000000 tip
123 *** runcommand ci -Aqm non-lfs
123 *** runcommand ci -Aqm non-lfs
124 *** runcommand diff -r .~1
124 *** runcommand diff -r .~1
125 diff -r 000000000000 nonlfs.txt
125 diff -r 000000000000 nonlfs.txt
126 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
126 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
127 +++ b/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
127 +++ b/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
128 @@ -0,0 +1,1 @@
128 @@ -0,0 +1,1 @@
129 +non-lfs
129 +non-lfs
130 *** runcommand debugupgraderepo -q --run
130 *** runcommand debugupgraderepo -q --run
131
131
132 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
132 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
133 [1]
133 [1]
134
134
135 #if lfsremote-on
135 #if lfsremote-on
136
136
137 $ hg push -q
137 $ hg push -q
138 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
138 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
139 [1]
139 [1]
140
140
141 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
141 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
142 $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
142 $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
143 [1]
143 [1]
144
144
145 $ hg init $TESTTMP/client1_pull
145 $ hg init $TESTTMP/client1_pull
146 $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
146 $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
147 $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
147 $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
148 [1]
148 [1]
149
149
150 $ hg identify http://localhost:$HGPORT
150 $ hg identify http://localhost:$HGPORT
151 d437e1d24fbd
151 d437e1d24fbd
152
152
153 #endif
153 #endif
154
154
155 --------------------------------------------------------------------------------
155 --------------------------------------------------------------------------------
156 Case #2: client with non-lfs content and the extension enabled; server with
156 Case #2: client with non-lfs content and the extension enabled; server with
157 non-lfs content, and the extension state controlled by #testcases.
157 non-lfs content, and the extension state controlled by #testcases.
158
158
159 $ cat >> $HGRCPATH <<EOF
159 $ cat >> $HGRCPATH <<EOF
160 > [extensions]
160 > [extensions]
161 > lfs =
161 > lfs =
162 > EOF
162 > EOF
163 $ echo 'non-lfs' > nonlfs2.txt
163 $ echo 'non-lfs' > nonlfs2.txt
164 $ hg ci -Aqm 'non-lfs file with lfs client'
164 $ hg ci -Aqm 'non-lfs file with lfs client'
165
165
166 Since no lfs content has been added yet, the push is allowed, even when the
166 Since no lfs content has been added yet, the push is allowed, even when the
167 extension is not enabled remotely.
167 extension is not enabled remotely.
168
168
169 $ hg push -q
169 $ hg push -q
170 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
170 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
171 [1]
171 [1]
172
172
173 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
173 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
174 $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
174 $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
175 [1]
175 [1]
176
176
177 $ hg init $TESTTMP/client2_pull
177 $ hg init $TESTTMP/client2_pull
178 $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
178 $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
179 $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
179 $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
180 [1]
180 [1]
181
181
182 $ hg identify http://localhost:$HGPORT
182 $ hg identify http://localhost:$HGPORT
183 1477875038c6
183 1477875038c6
184
184
185 --------------------------------------------------------------------------------
185 --------------------------------------------------------------------------------
186 Case #3: client with lfs content and the extension enabled; server with
186 Case #3: client with lfs content and the extension enabled; server with
187 non-lfs content, and the extension state controlled by #testcases. The server
187 non-lfs content, and the extension state controlled by #testcases. The server
188 should have an 'lfs' requirement after it picks up its first commit with a blob.
188 should have an 'lfs' requirement after it picks up its first commit with a blob.
189
189
190 $ echo 'this is a big lfs file' > lfs.bin
190 $ echo 'this is a big lfs file' > lfs.bin
191 $ hg ci -Aqm 'lfs'
191 $ hg ci -Aqm 'lfs'
192 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
192 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
193 .hg/requires:lfs
193 .hg/requires:lfs
194
194
195 #if lfsremote-off
195 #if lfsremote-off
196 $ hg push -q
196 $ hg push -q
197 abort: required features are not supported in the destination: lfs
197 abort: required features are not supported in the destination: lfs
198 (enable the lfs extension on the server)
198 (enable the lfs extension on the server)
199 [255]
199 [255]
200 #else
200 #else
201 $ hg push -q
201 $ hg push -q
202 #endif
202 #endif
203 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
203 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
204 .hg/requires:lfs
204 .hg/requires:lfs
205 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
205 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
206
206
207 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
207 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
208 $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
208 $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
209 $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
209 $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
210 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
210 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
211
211
212 $ hg init $TESTTMP/client3_pull
212 $ hg init $TESTTMP/client3_pull
213 $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
213 $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
214 $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
214 $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
215 $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
215 $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
216 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
216 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
217
217
218 Test that the commit/changegroup requirement check hook can be run multiple
218 Test that the commit/changegroup requirement check hook can be run multiple
219 times.
219 times.
220
220
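The property being exercised is that recording the requirement is idempotent, so the hook can fire on every commit or changegroup without side effects once 'lfs' is already listed. A rough sketch of that idea in Python — the helper name and the direct handling of the requires file are assumptions for illustration, not the actual hgext.lfs code (which registers checkrequireslfs as a hook):

    import os

    def add_requirement(repo_root, name='lfs'):
        """Record `name` in .hg/requires unless it is already present."""
        path = os.path.join(repo_root, '.hg', 'requires')
        try:
            with open(path) as fp:
                existing = set(fp.read().split())
        except IOError:
            existing = set()
        if name in existing:
            # Running again is a no-op, which is what lets the hook be
            # invoked repeatedly without going through reposetup() again.
            return False
        with open(path, 'a') as fp:
            fp.write(name + '\n')
        return True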
221 $ hg clone -qr 0 http://localhost:$HGPORT $TESTTMP/cmdserve_client3
221 $ hg clone -qr 0 http://localhost:$HGPORT $TESTTMP/cmdserve_client3
222
222
223 $ cd ../cmdserve_client3
223 $ cd ../cmdserve_client3
224
224
225 >>> from __future__ import absolute_import
225 >>> from __future__ import absolute_import
226 >>> from hgclient import check, readchannel, runcommand
226 >>> from hgclient import check, readchannel, runcommand
227 >>> @check
227 >>> @check
228 ... def addrequirement(server):
228 ... def addrequirement(server):
229 ... readchannel(server)
229 ... readchannel(server)
230 ... # change the repo in a way that adds the lfs requirement
230 ... # change the repo in a way that adds the lfs requirement
231 ... runcommand(server, [b'pull', b'-qu'])
231 ... runcommand(server, [b'pull', b'-qu'])
232 ... # Now cause the requirement adding hook to fire again, without going
232 ... # Now cause the requirement adding hook to fire again, without going
233 ... # through reposetup() again.
233 ... # through reposetup() again.
234 ... with open('file.txt', 'wb') as fp:
234 ... with open('file.txt', 'wb') as fp:
235 ... fp.write(b'data')
235 ... fp.write(b'data')
236 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
236 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
237 *** runcommand pull -qu
237 *** runcommand pull -qu
238 *** runcommand ci -Aqm non-lfs
238 *** runcommand ci -Aqm non-lfs
239
239
240 $ cd ../client
240 $ cd ../client
241
241
242 The difference here is that the push above failed when the extension isn't
242 The difference here is that the push above failed when the extension isn't
243 enabled on the server.
243 enabled on the server.
244 $ hg identify http://localhost:$HGPORT
244 $ hg identify http://localhost:$HGPORT
245 8374dc4052cb (lfsremote-on !)
245 8374dc4052cb (lfsremote-on !)
246 1477875038c6 (lfsremote-off !)
246 1477875038c6 (lfsremote-off !)
247
247
248 Don't bother testing the lfsremote-off cases - the server won't be able
248 Don't bother testing the lfsremote-off cases - the server won't be able
249 to launch if there's lfs content and the extension is disabled.
249 to launch if there's lfs content and the extension is disabled.
250
250
251 #if lfsremote-on
251 #if lfsremote-on
252
252
253 --------------------------------------------------------------------------------
253 --------------------------------------------------------------------------------
254 Case #4: client with non-lfs content and the extension disabled; server with
254 Case #4: client with non-lfs content and the extension disabled; server with
255 lfs content, and the extension enabled.
255 lfs content, and the extension enabled.
256
256
257 $ cat >> $HGRCPATH <<EOF
257 $ cat >> $HGRCPATH <<EOF
258 > [extensions]
258 > [extensions]
259 > lfs = !
259 > lfs = !
260 > EOF
260 > EOF
261
261
262 $ hg init $TESTTMP/client4
262 $ hg init $TESTTMP/client4
263 $ cd $TESTTMP/client4
263 $ cd $TESTTMP/client4
264 $ cat >> .hg/hgrc <<EOF
264 $ cat >> .hg/hgrc <<EOF
265 > [paths]
265 > [paths]
266 > default = http://localhost:$HGPORT
266 > default = http://localhost:$HGPORT
267 > EOF
267 > EOF
268 $ echo 'non-lfs' > nonlfs2.txt
268 $ echo 'non-lfs' > nonlfs2.txt
269 $ hg ci -Aqm 'non-lfs'
269 $ hg ci -Aqm 'non-lfs'
270 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
270 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
271 $TESTTMP/server/.hg/requires:lfs
271 $TESTTMP/server/.hg/requires:lfs
272
272
273 $ hg push -q --force
273 $ hg push -q --force
274 warning: repository is unrelated
274 warning: repository is unrelated
275 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
275 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
276 $TESTTMP/server/.hg/requires:lfs
276 $TESTTMP/server/.hg/requires:lfs
277
277
278 $ hg clone http://localhost:$HGPORT $TESTTMP/client4_clone
278 $ hg clone http://localhost:$HGPORT $TESTTMP/client4_clone
279 (remote is using large file support (lfs), but it is explicitly disabled in the local configuration)
279 (remote is using large file support (lfs), but it is explicitly disabled in the local configuration)
280 abort: repository requires features unknown to this Mercurial: lfs
280 abort: repository requires features unknown to this Mercurial: lfs
281 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
281 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
282 [255]
282 [255]
283 $ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
283 $ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
284 grep: $TESTTMP/client4_clone/.hg/requires: $ENOENT$
284 grep: $TESTTMP/client4_clone/.hg/requires: $ENOENT$
285 $TESTTMP/server/.hg/requires:lfs
285 $TESTTMP/server/.hg/requires:lfs
286 [2]
286 [2]
287
287
288 TODO: fail more gracefully.
288 TODO: fail more gracefully.
289
289
290 $ hg init $TESTTMP/client4_pull
290 $ hg init $TESTTMP/client4_pull
291 $ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT
291 $ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT
292 pulling from http://localhost:$HGPORT/
292 pulling from http://localhost:$HGPORT/
293 requesting all changes
293 requesting all changes
294 remote: abort: no common changegroup version
294 remote: abort: no common changegroup version
295 abort: pull failed on remote
295 abort: pull failed on remote
296 [255]
296 [100]
297 $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES
297 $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES
298 $TESTTMP/server/.hg/requires:lfs
298 $TESTTMP/server/.hg/requires:lfs
299
299
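The [255] → [100] pair a few lines above shows the behaviour change being tested here: an abort caused by a failure on the remote side now exits with the detailed exit code 100 instead of the generic 255. A minimal sketch of the idea — the class and function names below are invented for illustration and are not Mercurial's actual error hierarchy:

    class RemoteFailure(Exception):
        """Stand-in for an error raised when the remote end reports a problem."""
        exit_code = 100  # detailed exit code, replacing the generic 255

    def pull_and_exit(do_pull):
        try:
            do_pull()
        except RemoteFailure as err:
            # 'abort: pull failed on remote' now terminates with status 100.
            raise SystemExit(err.exit_code)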
300 $ hg identify http://localhost:$HGPORT
300 $ hg identify http://localhost:$HGPORT
301 03b080fa9d93
301 03b080fa9d93
302
302
303 --------------------------------------------------------------------------------
303 --------------------------------------------------------------------------------
304 Case #5: client with non-lfs content and the extension enabled; server with
304 Case #5: client with non-lfs content and the extension enabled; server with
305 lfs content, and the extension enabled.
305 lfs content, and the extension enabled.
306
306
307 $ cat >> $HGRCPATH <<EOF
307 $ cat >> $HGRCPATH <<EOF
308 > [extensions]
308 > [extensions]
309 > lfs =
309 > lfs =
310 > EOF
310 > EOF
311 $ echo 'non-lfs' > nonlfs3.txt
311 $ echo 'non-lfs' > nonlfs3.txt
312 $ hg ci -Aqm 'non-lfs file with lfs client'
312 $ hg ci -Aqm 'non-lfs file with lfs client'
313
313
314 $ hg push -q
314 $ hg push -q
315 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
315 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
316 $TESTTMP/server/.hg/requires:lfs
316 $TESTTMP/server/.hg/requires:lfs
317
317
318 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client5_clone
318 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client5_clone
319 $ grep 'lfs' $TESTTMP/client5_clone/.hg/requires $SERVER_REQUIRES
319 $ grep 'lfs' $TESTTMP/client5_clone/.hg/requires $SERVER_REQUIRES
320 $TESTTMP/client5_clone/.hg/requires:lfs
320 $TESTTMP/client5_clone/.hg/requires:lfs
321 $TESTTMP/server/.hg/requires:lfs
321 $TESTTMP/server/.hg/requires:lfs
322
322
323 $ hg init $TESTTMP/client5_pull
323 $ hg init $TESTTMP/client5_pull
324 $ hg -R $TESTTMP/client5_pull pull -q http://localhost:$HGPORT
324 $ hg -R $TESTTMP/client5_pull pull -q http://localhost:$HGPORT
325 $ grep 'lfs' $TESTTMP/client5_pull/.hg/requires $SERVER_REQUIRES
325 $ grep 'lfs' $TESTTMP/client5_pull/.hg/requires $SERVER_REQUIRES
326 $TESTTMP/client5_pull/.hg/requires:lfs
326 $TESTTMP/client5_pull/.hg/requires:lfs
327 $TESTTMP/server/.hg/requires:lfs
327 $TESTTMP/server/.hg/requires:lfs
328
328
329 $ hg identify http://localhost:$HGPORT
329 $ hg identify http://localhost:$HGPORT
330 c729025cc5e3
330 c729025cc5e3
331
331
332 $ mv $HGRCPATH $HGRCPATH.tmp
332 $ mv $HGRCPATH $HGRCPATH.tmp
333 $ cp $HGRCPATH.orig $HGRCPATH
333 $ cp $HGRCPATH.orig $HGRCPATH
334
334
335 >>> from __future__ import absolute_import
335 >>> from __future__ import absolute_import
336 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
336 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
337 >>> @check
337 >>> @check
338 ... def checkflags(server):
338 ... def checkflags(server):
339 ... readchannel(server)
339 ... readchannel(server)
340 ... bprint(b'')
340 ... bprint(b'')
341 ... bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
341 ... bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
342 ... stdout.flush()
342 ... stdout.flush()
343 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
343 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
344 ... b'../server'])
344 ... b'../server'])
345 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
345 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
346 ... b'../server'])
346 ... b'../server'])
347 ... runcommand(server, [b'config', b'extensions', b'--cwd',
347 ... runcommand(server, [b'config', b'extensions', b'--cwd',
348 ... b'../server'])
348 ... b'../server'])
349 ...
349 ...
350 ... bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag")
350 ... bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag")
351 ... stdout.flush()
351 ... stdout.flush()
352 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
352 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
353 ... runcommand(server, [b'config', b'extensions'])
353 ... runcommand(server, [b'config', b'extensions'])
354
354
355 # LFS required- both lfs and non-lfs revlogs have 0x2000 flag
355 # LFS required- both lfs and non-lfs revlogs have 0x2000 flag
356 *** runcommand debugprocessors lfs.bin -R ../server
356 *** runcommand debugprocessors lfs.bin -R ../server
357 registered processor '0x8000'
357 registered processor '0x8000'
358 registered processor '0x800'
358 registered processor '0x800'
359 registered processor '0x2000'
359 registered processor '0x2000'
360 *** runcommand debugprocessors nonlfs2.txt -R ../server
360 *** runcommand debugprocessors nonlfs2.txt -R ../server
361 registered processor '0x8000'
361 registered processor '0x8000'
362 registered processor '0x800'
362 registered processor '0x800'
363 registered processor '0x2000'
363 registered processor '0x2000'
364 *** runcommand config extensions --cwd ../server
364 *** runcommand config extensions --cwd ../server
365 extensions.debugprocessors=$TESTTMP/debugprocessors.py
365 extensions.debugprocessors=$TESTTMP/debugprocessors.py
366 extensions.lfs=
366 extensions.lfs=
367
367
368 # LFS not enabled- revlogs don't have 0x2000 flag
368 # LFS not enabled- revlogs don't have 0x2000 flag
369 *** runcommand debugprocessors nonlfs3.txt
369 *** runcommand debugprocessors nonlfs3.txt
370 registered processor '0x8000'
370 registered processor '0x8000'
371 registered processor '0x800'
371 registered processor '0x800'
372 *** runcommand config extensions
372 *** runcommand config extensions
373 extensions.debugprocessors=$TESTTMP/debugprocessors.py
373 extensions.debugprocessors=$TESTTMP/debugprocessors.py
374
374
375 $ rm $HGRCPATH
375 $ rm $HGRCPATH
376 $ mv $HGRCPATH.tmp $HGRCPATH
376 $ mv $HGRCPATH.tmp $HGRCPATH
377
377
378 $ hg clone $TESTTMP/client $TESTTMP/nonlfs -qr 0 --config extensions.lfs=
378 $ hg clone $TESTTMP/client $TESTTMP/nonlfs -qr 0 --config extensions.lfs=
379 $ cat >> $TESTTMP/nonlfs/.hg/hgrc <<EOF
379 $ cat >> $TESTTMP/nonlfs/.hg/hgrc <<EOF
380 > [extensions]
380 > [extensions]
381 > lfs = !
381 > lfs = !
382 > EOF
382 > EOF
383
383
384 >>> from __future__ import absolute_import, print_function
384 >>> from __future__ import absolute_import, print_function
385 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
385 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
386 >>> @check
386 >>> @check
387 ... def checkflags2(server):
387 ... def checkflags2(server):
388 ... readchannel(server)
388 ... readchannel(server)
389 ... bprint(b'')
389 ... bprint(b'')
390 ... bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag')
390 ... bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag')
391 ... stdout.flush()
391 ... stdout.flush()
392 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
392 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
393 ... b'../server'])
393 ... b'../server'])
394 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
394 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
395 ... b'../server'])
395 ... b'../server'])
396 ... runcommand(server, [b'config', b'extensions', b'--cwd',
396 ... runcommand(server, [b'config', b'extensions', b'--cwd',
397 ... b'../server'])
397 ... b'../server'])
398 ...
398 ...
399 ... bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag')
399 ... bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag')
400 ... stdout.flush()
400 ... stdout.flush()
401 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
401 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
402 ... runcommand(server, [b'config', b'extensions'])
402 ... runcommand(server, [b'config', b'extensions'])
403 ...
403 ...
404 ... bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag")
404 ... bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag")
405 ... stdout.flush()
405 ... stdout.flush()
406 ... runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R',
406 ... runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R',
407 ... b'../nonlfs'])
407 ... b'../nonlfs'])
408 ... runcommand(server, [b'config', b'extensions', b'--cwd',
408 ... runcommand(server, [b'config', b'extensions', b'--cwd',
409 ... b'../nonlfs'])
409 ... b'../nonlfs'])
410
410
411 # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag
411 # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag
412 *** runcommand debugprocessors lfs.bin -R ../server
412 *** runcommand debugprocessors lfs.bin -R ../server
413 registered processor '0x8000'
413 registered processor '0x8000'
414 registered processor '0x800'
414 registered processor '0x800'
415 registered processor '0x2000'
415 registered processor '0x2000'
416 *** runcommand debugprocessors nonlfs2.txt -R ../server
416 *** runcommand debugprocessors nonlfs2.txt -R ../server
417 registered processor '0x8000'
417 registered processor '0x8000'
418 registered processor '0x800'
418 registered processor '0x800'
419 registered processor '0x2000'
419 registered processor '0x2000'
420 *** runcommand config extensions --cwd ../server
420 *** runcommand config extensions --cwd ../server
421 extensions.debugprocessors=$TESTTMP/debugprocessors.py
421 extensions.debugprocessors=$TESTTMP/debugprocessors.py
422 extensions.lfs=
422 extensions.lfs=
423
423
424 # LFS enabled without requirement- revlogs have 0x2000 flag
424 # LFS enabled without requirement- revlogs have 0x2000 flag
425 *** runcommand debugprocessors nonlfs3.txt
425 *** runcommand debugprocessors nonlfs3.txt
426 registered processor '0x8000'
426 registered processor '0x8000'
427 registered processor '0x800'
427 registered processor '0x800'
428 registered processor '0x2000'
428 registered processor '0x2000'
429 *** runcommand config extensions
429 *** runcommand config extensions
430 extensions.debugprocessors=$TESTTMP/debugprocessors.py
430 extensions.debugprocessors=$TESTTMP/debugprocessors.py
431 extensions.lfs=
431 extensions.lfs=
432
432
433 # LFS disabled locally- revlogs don't have 0x2000 flag
433 # LFS disabled locally- revlogs don't have 0x2000 flag
434 *** runcommand debugprocessors nonlfs.txt -R ../nonlfs
434 *** runcommand debugprocessors nonlfs.txt -R ../nonlfs
435 registered processor '0x8000'
435 registered processor '0x8000'
436 registered processor '0x800'
436 registered processor '0x800'
437 *** runcommand config extensions --cwd ../nonlfs
437 *** runcommand config extensions --cwd ../nonlfs
438 extensions.debugprocessors=$TESTTMP/debugprocessors.py
438 extensions.debugprocessors=$TESTTMP/debugprocessors.py
439 extensions.lfs=!
439 extensions.lfs=!
440
440
441 --------------------------------------------------------------------------------
441 --------------------------------------------------------------------------------
442 Case #6: client with lfs content and the extension enabled; server with
442 Case #6: client with lfs content and the extension enabled; server with
443 lfs content, and the extension enabled.
443 lfs content, and the extension enabled.
444
444
445 $ echo 'this is another lfs file' > lfs2.txt
445 $ echo 'this is another lfs file' > lfs2.txt
446 $ hg ci -Aqm 'lfs file with lfs client'
446 $ hg ci -Aqm 'lfs file with lfs client'
447
447
448 $ hg --config paths.default= push -v http://localhost:$HGPORT
448 $ hg --config paths.default= push -v http://localhost:$HGPORT
449 pushing to http://localhost:$HGPORT/
449 pushing to http://localhost:$HGPORT/
450 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
450 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
451 searching for changes
451 searching for changes
452 remote has heads on branch 'default' that are not known locally: 8374dc4052cb
452 remote has heads on branch 'default' that are not known locally: 8374dc4052cb
453 lfs: uploading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
453 lfs: uploading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
454 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
454 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
455 lfs: uploaded 1 files (25 bytes)
455 lfs: uploaded 1 files (25 bytes)
456 1 changesets found
456 1 changesets found
457 uncompressed size of bundle content:
457 uncompressed size of bundle content:
458 206 (changelog)
458 206 (changelog)
459 172 (manifests)
459 172 (manifests)
460 275 lfs2.txt
460 275 lfs2.txt
461 remote: adding changesets
461 remote: adding changesets
462 remote: adding manifests
462 remote: adding manifests
463 remote: adding file changes
463 remote: adding file changes
464 remote: added 1 changesets with 1 changes to 1 files
464 remote: added 1 changesets with 1 changes to 1 files
465 (sent 8 HTTP requests and * bytes; received * bytes in responses) (glob) (?)
465 (sent 8 HTTP requests and * bytes; received * bytes in responses) (glob) (?)
466 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
466 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
467 .hg/requires:lfs
467 .hg/requires:lfs
468 $TESTTMP/server/.hg/requires:lfs
468 $TESTTMP/server/.hg/requires:lfs
469
469
470 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client6_clone
470 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client6_clone
471 $ grep 'lfs' $TESTTMP/client6_clone/.hg/requires $SERVER_REQUIRES
471 $ grep 'lfs' $TESTTMP/client6_clone/.hg/requires $SERVER_REQUIRES
472 $TESTTMP/client6_clone/.hg/requires:lfs
472 $TESTTMP/client6_clone/.hg/requires:lfs
473 $TESTTMP/server/.hg/requires:lfs
473 $TESTTMP/server/.hg/requires:lfs
474
474
475 $ hg init $TESTTMP/client6_pull
475 $ hg init $TESTTMP/client6_pull
476 $ hg -R $TESTTMP/client6_pull pull -u -v http://localhost:$HGPORT
476 $ hg -R $TESTTMP/client6_pull pull -u -v http://localhost:$HGPORT
477 pulling from http://localhost:$HGPORT/
477 pulling from http://localhost:$HGPORT/
478 requesting all changes
478 requesting all changes
479 adding changesets
479 adding changesets
480 adding manifests
480 adding manifests
481 adding file changes
481 adding file changes
482 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
482 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
483 added 6 changesets with 5 changes to 5 files (+1 heads)
483 added 6 changesets with 5 changes to 5 files (+1 heads)
484 new changesets d437e1d24fbd:d3b84d50eacb
484 new changesets d437e1d24fbd:d3b84d50eacb
485 resolving manifests
485 resolving manifests
486 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
486 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
487 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
487 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
488 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
488 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
489 lfs: downloaded 1 files (25 bytes)
489 lfs: downloaded 1 files (25 bytes)
490 getting lfs2.txt
490 getting lfs2.txt
491 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
491 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
492 getting nonlfs2.txt
492 getting nonlfs2.txt
493 getting nonlfs3.txt
493 getting nonlfs3.txt
494 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
494 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
495 updated to "d3b84d50eacb: lfs file with lfs client"
495 updated to "d3b84d50eacb: lfs file with lfs client"
496 1 other heads for branch "default"
496 1 other heads for branch "default"
497 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
497 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
498 $ grep 'lfs' $TESTTMP/client6_pull/.hg/requires $SERVER_REQUIRES
498 $ grep 'lfs' $TESTTMP/client6_pull/.hg/requires $SERVER_REQUIRES
499 $TESTTMP/client6_pull/.hg/requires:lfs
499 $TESTTMP/client6_pull/.hg/requires:lfs
500 $TESTTMP/server/.hg/requires:lfs
500 $TESTTMP/server/.hg/requires:lfs
501
501
502 $ hg identify http://localhost:$HGPORT
502 $ hg identify http://localhost:$HGPORT
503 d3b84d50eacb
503 d3b84d50eacb
504
504
505 --------------------------------------------------------------------------------
505 --------------------------------------------------------------------------------
506 Misc: process dies early if a requirement exists and the extension is disabled
506 Misc: process dies early if a requirement exists and the extension is disabled
507
507
508 $ hg --config extensions.lfs=! summary
508 $ hg --config extensions.lfs=! summary
509 abort: repository requires features unknown to this Mercurial: lfs
509 abort: repository requires features unknown to this Mercurial: lfs
510 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
510 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
511 [255]
511 [255]
512
512
513 $ echo 'this is an lfs file' > $TESTTMP/client6_clone/lfspair1.bin
513 $ echo 'this is an lfs file' > $TESTTMP/client6_clone/lfspair1.bin
514 $ echo 'this is an lfs file too' > $TESTTMP/client6_clone/lfspair2.bin
514 $ echo 'this is an lfs file too' > $TESTTMP/client6_clone/lfspair2.bin
515 $ hg -R $TESTTMP/client6_clone ci -Aqm 'add lfs pair'
515 $ hg -R $TESTTMP/client6_clone ci -Aqm 'add lfs pair'
516 $ hg -R $TESTTMP/client6_clone push -q
516 $ hg -R $TESTTMP/client6_clone push -q
517
517
518 $ hg clone -qU http://localhost:$HGPORT $TESTTMP/bulkfetch
518 $ hg clone -qU http://localhost:$HGPORT $TESTTMP/bulkfetch
519
519
520 Cat doesn't prefetch unless data is needed (e.g. '-T {rawdata}' doesn't need it)
520 Cat doesn't prefetch unless data is needed (e.g. '-T {rawdata}' doesn't need it)
521
521
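The '-T {rawdata}' case only needs the pointer text that is stored in the filelog, so no blob download happens; '{data}' and the JSON template do need the real contents. For reference, the pointer is plain key/value text; a small parsing sketch (the function name is made up for this example):

    def parse_lfs_pointer(raw):
        """Split a Git-LFS pointer file into its space-separated fields."""
        fields = {}
        for line in raw.strip().splitlines():
            key, _, value = line.partition(' ')
            fields[key] = value
        return fields

    pointer = (
        'version https://git-lfs.github.com/spec/v1\n'
        'oid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782\n'
        'size 20\n'
        'x-is-binary 0\n'
    )
    assert parse_lfs_pointer(pointer)['size'] == '20'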
522 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{rawdata}\n{path}\n'
522 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{rawdata}\n{path}\n'
523 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
523 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
524 version https://git-lfs.github.com/spec/v1
524 version https://git-lfs.github.com/spec/v1
525 oid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
525 oid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
526 size 20
526 size 20
527 x-is-binary 0
527 x-is-binary 0
528
528
529 lfspair1.bin
529 lfspair1.bin
530
530
531 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T json
531 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T json
532 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
532 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
533 [lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
533 [lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
534 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
534 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
535 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
535 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
536 lfs: downloaded 1 files (20 bytes)
536 lfs: downloaded 1 files (20 bytes)
537 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
537 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
538
538
539 {
539 {
540 "data": "this is an lfs file\n",
540 "data": "this is an lfs file\n",
541 "path": "lfspair1.bin",
541 "path": "lfspair1.bin",
542 "rawdata": "version https://git-lfs.github.com/spec/v1\noid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782\nsize 20\nx-is-binary 0\n"
542 "rawdata": "version https://git-lfs.github.com/spec/v1\noid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782\nsize 20\nx-is-binary 0\n"
543 }
543 }
544 ]
544 ]
545
545
546 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
546 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
547
547
548 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{data}\n'
548 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{data}\n'
549 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
549 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
550 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
550 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
551 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
551 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
552 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
552 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
553 lfs: downloaded 1 files (20 bytes)
553 lfs: downloaded 1 files (20 bytes)
554 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
554 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
555 this is an lfs file
555 this is an lfs file
556
556
557 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair2.bin
557 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair2.bin
558 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
558 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
559 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
559 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
560 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
560 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
561 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
561 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
562 lfs: downloaded 1 files (24 bytes)
562 lfs: downloaded 1 files (24 bytes)
563 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
563 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
564 this is an lfs file too
564 this is an lfs file too
565
565
566 Export will prefetch all needed files across all needed revisions
566 Export will prefetch all needed files across all needed revisions
567
567
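The single 'need to transfer 4 objects (92 bytes)' line in the run below reflects that export collects the missing blobs for every requested revision up front and fetches them as one batch. A conceptual sketch of that planning step, using invented data shapes (a list of (oid, size) pairs per revision) rather than the extension's real structures:

    def plan_batch(pointers_by_rev, local_store):
        """Return the distinct missing oids plus their total size so they
        can be requested in one batch instead of one download per file."""
        missing = {}
        for pointers in pointers_by_rev.values():
            for oid, size in pointers:
                if oid not in local_store:
                    missing[oid] = size
        return sorted(missing), sum(missing.values())

    # Four distinct objects totalling 92 bytes, matching the run below.
    pointers_by_rev = {
        'rev-a': [('a82f1c5c...', 25)],
        'rev-b': [('bed80f00...', 23)],
        'rev-c': [('cf1b2787...', 20), ('d96eda2c...', 24)],
    }
    oids, total = plan_batch(pointers_by_rev, local_store=set())
    assert len(oids) == 4 and total == 92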
568 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
568 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
569 $ hg -R $TESTTMP/bulkfetch -v export -r 0:tip -o all.export
569 $ hg -R $TESTTMP/bulkfetch -v export -r 0:tip -o all.export
570 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
570 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
571 exporting patches:
571 exporting patches:
572 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
572 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
573 lfs: need to transfer 4 objects (92 bytes)
573 lfs: need to transfer 4 objects (92 bytes)
574 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
574 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
575 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
575 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
576 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
576 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
577 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
577 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
578 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
578 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
579 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
579 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
580 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
580 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
581 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
581 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
582 lfs: downloaded 4 files (92 bytes)
582 lfs: downloaded 4 files (92 bytes)
583 all.export
583 all.export
584 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
584 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
585 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
585 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
586 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
586 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
587 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
587 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
588
588
589 Export with selected files is used with `extdiff --patch`
589 Export with selected files is used with `extdiff --patch`
590
590
591 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
591 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
592 $ hg --config extensions.extdiff= \
592 $ hg --config extensions.extdiff= \
593 > -R $TESTTMP/bulkfetch -v extdiff -r 2:tip --patch $TESTTMP/bulkfetch/lfs.bin
593 > -R $TESTTMP/bulkfetch -v extdiff -r 2:tip --patch $TESTTMP/bulkfetch/lfs.bin
594 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
594 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
595 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
595 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
596 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
596 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
597 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
597 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
598 lfs: downloaded 1 files (23 bytes)
598 lfs: downloaded 1 files (23 bytes)
599 */hg-8374dc4052cb.patch (glob)
599 */hg-8374dc4052cb.patch (glob)
600 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
600 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
601 */hg-9640b57e77b1.patch (glob)
601 */hg-9640b57e77b1.patch (glob)
602 --- */hg-8374dc4052cb.patch * (glob)
602 --- */hg-8374dc4052cb.patch * (glob)
603 +++ */hg-9640b57e77b1.patch * (glob)
603 +++ */hg-9640b57e77b1.patch * (glob)
604 @@ -2,12 +2,7 @@
604 @@ -2,12 +2,7 @@
605 # User test
605 # User test
606 # Date 0 0
606 # Date 0 0
607 # Thu Jan 01 00:00:00 1970 +0000
607 # Thu Jan 01 00:00:00 1970 +0000
608 -# Node ID 8374dc4052cbd388e79d9dc4ddb29784097aa354
608 -# Node ID 8374dc4052cbd388e79d9dc4ddb29784097aa354
609 -# Parent 1477875038c60152e391238920a16381c627b487
609 -# Parent 1477875038c60152e391238920a16381c627b487
610 -lfs
610 -lfs
611 +# Node ID 9640b57e77b14c3a0144fb4478b6cc13e13ea0d1
611 +# Node ID 9640b57e77b14c3a0144fb4478b6cc13e13ea0d1
612 +# Parent d3b84d50eacbd56638e11abce6b8616aaba54420
612 +# Parent d3b84d50eacbd56638e11abce6b8616aaba54420
613 +add lfs pair
613 +add lfs pair
614
614
615 -diff -r 1477875038c6 -r 8374dc4052cb lfs.bin
615 -diff -r 1477875038c6 -r 8374dc4052cb lfs.bin
616 ---- /dev/null Thu Jan 01 00:00:00 1970 +0000
616 ---- /dev/null Thu Jan 01 00:00:00 1970 +0000
617 -+++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
617 -+++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
618 -@@ -0,0 +1,1 @@
618 -@@ -0,0 +1,1 @@
619 -+this is a big lfs file
619 -+this is a big lfs file
620 cleaning up temp directory
620 cleaning up temp directory
621 [1]
621 [1]
622
622
623 Diff will prefetch files
623 Diff will prefetch files
624
624
625 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
625 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
626 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip
626 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip
627 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
627 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
628 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
628 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
629 lfs: need to transfer 4 objects (92 bytes)
629 lfs: need to transfer 4 objects (92 bytes)
630 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
630 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
631 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
631 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
632 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
632 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
633 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
633 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
634 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
634 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
635 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
635 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
636 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
636 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
637 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
637 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
638 lfs: downloaded 4 files (92 bytes)
638 lfs: downloaded 4 files (92 bytes)
639 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
639 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
640 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
640 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
641 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
641 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
642 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
642 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
643 diff -r 8374dc4052cb -r 9640b57e77b1 lfs.bin
643 diff -r 8374dc4052cb -r 9640b57e77b1 lfs.bin
644 --- a/lfs.bin Thu Jan 01 00:00:00 1970 +0000
644 --- a/lfs.bin Thu Jan 01 00:00:00 1970 +0000
645 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
645 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
646 @@ -1,1 +0,0 @@
646 @@ -1,1 +0,0 @@
647 -this is a big lfs file
647 -this is a big lfs file
648 diff -r 8374dc4052cb -r 9640b57e77b1 lfs2.txt
648 diff -r 8374dc4052cb -r 9640b57e77b1 lfs2.txt
649 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
649 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
650 +++ b/lfs2.txt Thu Jan 01 00:00:00 1970 +0000
650 +++ b/lfs2.txt Thu Jan 01 00:00:00 1970 +0000
651 @@ -0,0 +1,1 @@
651 @@ -0,0 +1,1 @@
652 +this is another lfs file
652 +this is another lfs file
653 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair1.bin
653 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair1.bin
654 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
654 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
655 +++ b/lfspair1.bin Thu Jan 01 00:00:00 1970 +0000
655 +++ b/lfspair1.bin Thu Jan 01 00:00:00 1970 +0000
656 @@ -0,0 +1,1 @@
656 @@ -0,0 +1,1 @@
657 +this is an lfs file
657 +this is an lfs file
658 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
658 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
659 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
659 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
660 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
660 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
661 @@ -0,0 +1,1 @@
661 @@ -0,0 +1,1 @@
662 +this is an lfs file too
662 +this is an lfs file too
663 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs.txt
663 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs.txt
664 --- a/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
664 --- a/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
665 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
665 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
666 @@ -1,1 +0,0 @@
666 @@ -1,1 +0,0 @@
667 -non-lfs
667 -non-lfs
668 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs3.txt
668 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs3.txt
669 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
669 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
670 +++ b/nonlfs3.txt Thu Jan 01 00:00:00 1970 +0000
670 +++ b/nonlfs3.txt Thu Jan 01 00:00:00 1970 +0000
671 @@ -0,0 +1,1 @@
671 @@ -0,0 +1,1 @@
672 +non-lfs
672 +non-lfs
673
673
674 Only the files required by diff are prefetched
674 Only the files required by diff are prefetched
675
675
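Because the diff below is limited to lfspair2.bin, only that file's pointer is considered when computing what to download. A tiny sketch of the filtering idea (names and data invented for the example):

    def pointers_for_paths(pointers, paths):
        """Keep only the pointers for files the operation actually touches."""
        wanted = set(paths)
        return [p for p in pointers if p['path'] in wanted]

    pointers = [{'path': 'lfspair1.bin', 'oid': 'cf1b2787...'},
                {'path': 'lfspair2.bin', 'oid': 'd96eda2c...'}]
    assert pointers_for_paths(pointers, ['lfspair2.bin'])[0]['oid'] == 'd96eda2c...'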
676 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
676 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
677 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip $TESTTMP/bulkfetch/lfspair2.bin
677 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip $TESTTMP/bulkfetch/lfspair2.bin
678 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
678 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
679 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
679 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
680 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
680 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
681 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
681 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
682 lfs: downloaded 1 files (24 bytes)
682 lfs: downloaded 1 files (24 bytes)
683 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
683 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
684 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
684 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
685 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
685 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
686 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
686 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
687 @@ -0,0 +1,1 @@
687 @@ -0,0 +1,1 @@
688 +this is an lfs file too
688 +this is an lfs file too
689
689
690 #endif
690 #endif
691
691
692 $ "$PYTHON" $TESTDIR/killdaemons.py $DAEMON_PIDS
692 $ "$PYTHON" $TESTDIR/killdaemons.py $DAEMON_PIDS
693
693
694 $ cat $TESTTMP/errors.log
694 $ cat $TESTTMP/errors.log
@@ -1,101 +1,101 b''
1 #testcases tree flat-fncache flat-nofncache
1 #testcases tree flat-fncache flat-nofncache
2
2
3 Tests narrow stream clones
3 Tests narrow stream clones
4
4
5 $ . "$TESTDIR/narrow-library.sh"
5 $ . "$TESTDIR/narrow-library.sh"
6
6
7 #if tree
7 #if tree
8 $ cat << EOF >> $HGRCPATH
8 $ cat << EOF >> $HGRCPATH
9 > [experimental]
9 > [experimental]
10 > treemanifest = 1
10 > treemanifest = 1
11 > EOF
11 > EOF
12 #endif
12 #endif
13
13
14 #if flat-nofncache
14 #if flat-nofncache
15 $ cat << EOF >> $HGRCPATH
15 $ cat << EOF >> $HGRCPATH
16 > [format]
16 > [format]
17 > usefncache = 0
17 > usefncache = 0
18 > EOF
18 > EOF
19 #endif
19 #endif
20
20
21 Server setup
21 Server setup
22
22
23 $ hg init master
23 $ hg init master
24 $ cd master
24 $ cd master
25 $ mkdir dir
25 $ mkdir dir
26 $ mkdir dir/src
26 $ mkdir dir/src
27 $ cd dir/src
27 $ cd dir/src
28 $ for x in `$TESTDIR/seq.py 20`; do echo $x > "F$x"; hg add "F$x"; hg commit -m "Commit src $x"; done
28 $ for x in `$TESTDIR/seq.py 20`; do echo $x > "F$x"; hg add "F$x"; hg commit -m "Commit src $x"; done
29
29
30 $ cd ..
30 $ cd ..
31 $ mkdir tests
31 $ mkdir tests
32 $ cd tests
32 $ cd tests
33 $ for x in `$TESTDIR/seq.py 20`; do echo $x > "F$x"; hg add "F$x"; hg commit -m "Commit src $x"; done
33 $ for x in `$TESTDIR/seq.py 20`; do echo $x > "F$x"; hg add "F$x"; hg commit -m "Commit src $x"; done
34 $ cd ../../..
34 $ cd ../../..
35
35
36 Trying to stream clone when the server does not support it
36 Trying to stream clone when the server does not support it
37
37
38 $ hg clone --narrow ssh://user@dummy/master narrow --noupdate --include "dir/src/F10" --stream
38 $ hg clone --narrow ssh://user@dummy/master narrow --noupdate --include "dir/src/F10" --stream
39 streaming all changes
39 streaming all changes
40 remote: abort: server does not support narrow stream clones
40 remote: abort: server does not support narrow stream clones
41 abort: pull failed on remote
41 abort: pull failed on remote
42 [255]
42 [100]
43
43
44 Enable stream clone on the server
44 Enable stream clone on the server
45
45
46 $ echo "[experimental]" >> master/.hg/hgrc
46 $ echo "[experimental]" >> master/.hg/hgrc
47 $ echo "server.stream-narrow-clones=True" >> master/.hg/hgrc
47 $ echo "server.stream-narrow-clones=True" >> master/.hg/hgrc
48
48
49 Cloning a specific file when stream clone is supported
49 Cloning a specific file when stream clone is supported
50
50
51 $ hg clone --narrow ssh://user@dummy/master narrow --noupdate --include "dir/src/F10" --stream
51 $ hg clone --narrow ssh://user@dummy/master narrow --noupdate --include "dir/src/F10" --stream
52 streaming all changes
52 streaming all changes
53 * files to transfer, * KB of data (glob)
53 * files to transfer, * KB of data (glob)
54 transferred * KB in * seconds (* */sec) (glob)
54 transferred * KB in * seconds (* */sec) (glob)
55
55
56 $ cd narrow
56 $ cd narrow
57 $ ls -A
57 $ ls -A
58 .hg
58 .hg
59 $ hg tracked
59 $ hg tracked
60 I path:dir/src/F10
60 I path:dir/src/F10
61
61
62 Making sure we have the correct set of requirements
62 Making sure we have the correct set of requirements
63
63
64 $ cat .hg/requires
64 $ cat .hg/requires
65 dotencode (tree !)
65 dotencode (tree !)
66 dotencode (flat-fncache !)
66 dotencode (flat-fncache !)
67 fncache (tree !)
67 fncache (tree !)
68 fncache (flat-fncache !)
68 fncache (flat-fncache !)
69 generaldelta
69 generaldelta
70 narrowhg-experimental
70 narrowhg-experimental
71 persistent-nodemap (rust !)
71 persistent-nodemap (rust !)
72 revlog-compression-zstd (zstd !)
72 revlog-compression-zstd (zstd !)
73 revlogv1
73 revlogv1
74 sparserevlog
74 sparserevlog
75 store
75 store
76 treemanifest (tree !)
76 treemanifest (tree !)
77
77
78 Making sure store has the required files
78 Making sure store has the required files
79
79
80 $ ls .hg/store/
80 $ ls .hg/store/
81 00changelog.i
81 00changelog.i
82 00manifest.i
82 00manifest.i
83 data
83 data
84 fncache (tree !)
84 fncache (tree !)
85 fncache (flat-fncache !)
85 fncache (flat-fncache !)
86 meta (tree !)
86 meta (tree !)
87 narrowspec
87 narrowspec
88 undo
88 undo
89 undo.backupfiles
89 undo.backupfiles
90 undo.narrowspec
90 undo.narrowspec
91 undo.phaseroots
91 undo.phaseroots
92
92
93 Checking that the repository has all the required data and is not broken
93 Checking that the repository has all the required data and is not broken
94
94
95 $ hg verify
95 $ hg verify
96 checking changesets
96 checking changesets
97 checking manifests
97 checking manifests
98 checking directory manifests (tree !)
98 checking directory manifests (tree !)
99 crosschecking files in changesets and manifests
99 crosschecking files in changesets and manifests
100 checking files
100 checking files
101 checked 40 changesets with 1 changes to 1 files
101 checked 40 changesets with 1 changes to 1 files
@@ -1,154 +1,154 b''
1 $ . "$TESTDIR/narrow-library.sh"
1 $ . "$TESTDIR/narrow-library.sh"
2
2
3 $ hg init master
3 $ hg init master
4 $ cd master
4 $ cd master
5 $ cat >> .hg/hgrc <<EOF
5 $ cat >> .hg/hgrc <<EOF
6 > [narrow]
6 > [narrow]
7 > serveellipses=True
7 > serveellipses=True
8 > EOF
8 > EOF
9 $ for x in `$TESTDIR/seq.py 10`
9 $ for x in `$TESTDIR/seq.py 10`
10 > do
10 > do
11 > echo $x > "f$x"
11 > echo $x > "f$x"
12 > hg add "f$x"
12 > hg add "f$x"
13 > hg commit -m "Commit f$x"
13 > hg commit -m "Commit f$x"
14 > done
14 > done
15 $ cd ..
15 $ cd ..
16
16
17 narrow clone a couple of files, f2 and f8
17 narrow clone a couple of files, f2 and f8
18
18
19 $ hg clone --narrow ssh://user@dummy/master narrow --include "f2" --include "f8"
19 $ hg clone --narrow ssh://user@dummy/master narrow --include "f2" --include "f8"
20 requesting all changes
20 requesting all changes
21 adding changesets
21 adding changesets
22 adding manifests
22 adding manifests
23 adding file changes
23 adding file changes
24 added 5 changesets with 2 changes to 2 files
24 added 5 changesets with 2 changes to 2 files
25 new changesets *:* (glob)
25 new changesets *:* (glob)
26 updating to branch default
26 updating to branch default
27 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 $ cd narrow
28 $ cd narrow
29 $ ls -A
29 $ ls -A
30 .hg
30 .hg
31 f2
31 f2
32 f8
32 f8
33 $ cat f2 f8
33 $ cat f2 f8
34 2
34 2
35 8
35 8
36
36
37 $ cd ..
37 $ cd ..
38
38
39 change every upstream file twice
39 change every upstream file twice
40
40
41 $ cd master
41 $ cd master
42 $ for x in `$TESTDIR/seq.py 10`
42 $ for x in `$TESTDIR/seq.py 10`
43 > do
43 > do
44 > echo "update#1 $x" >> "f$x"
44 > echo "update#1 $x" >> "f$x"
45 > hg commit -m "Update#1 to f$x" "f$x"
45 > hg commit -m "Update#1 to f$x" "f$x"
46 > done
46 > done
47 $ for x in `$TESTDIR/seq.py 10`
47 $ for x in `$TESTDIR/seq.py 10`
48 > do
48 > do
49 > echo "update#2 $x" >> "f$x"
49 > echo "update#2 $x" >> "f$x"
50 > hg commit -m "Update#2 to f$x" "f$x"
50 > hg commit -m "Update#2 to f$x" "f$x"
51 > done
51 > done
52 $ cd ..
52 $ cd ..
53
53
54 look for incoming changes
54 look for incoming changes
55
55
56 $ cd narrow
56 $ cd narrow
57 $ hg incoming --limit 3
57 $ hg incoming --limit 3
58 comparing with ssh://user@dummy/master
58 comparing with ssh://user@dummy/master
59 searching for changes
59 searching for changes
60 changeset: 5:ddc055582556
60 changeset: 5:ddc055582556
61 user: test
61 user: test
62 date: Thu Jan 01 00:00:00 1970 +0000
62 date: Thu Jan 01 00:00:00 1970 +0000
63 summary: Update#1 to f1
63 summary: Update#1 to f1
64
64
65 changeset: 6:f66eb5ad621d
65 changeset: 6:f66eb5ad621d
66 user: test
66 user: test
67 date: Thu Jan 01 00:00:00 1970 +0000
67 date: Thu Jan 01 00:00:00 1970 +0000
68 summary: Update#1 to f2
68 summary: Update#1 to f2
69
69
70 changeset: 7:c42ecff04e99
70 changeset: 7:c42ecff04e99
71 user: test
71 user: test
72 date: Thu Jan 01 00:00:00 1970 +0000
72 date: Thu Jan 01 00:00:00 1970 +0000
73 summary: Update#1 to f3
73 summary: Update#1 to f3
74
74
75
75
76 Interrupting the pull is safe
76 Interrupting the pull is safe
77 $ hg --config hooks.pretxnchangegroup.bad=false pull -q
77 $ hg --config hooks.pretxnchangegroup.bad=false pull -q
78 transaction abort!
78 transaction abort!
79 rollback completed
79 rollback completed
80 abort: pretxnchangegroup.bad hook exited with status 1
80 abort: pretxnchangegroup.bad hook exited with status 1
81 [40]
81 [40]
82 $ hg id
82 $ hg id
83 223311e70a6f tip
83 223311e70a6f tip
84
84
85 pull new changes down to the narrow clone. Should get 9 new changesets: 4
85 pull new changes down to the narrow clone. Should get 9 new changesets: 4
86 relevant to the narrow spec, and 5 ellipsis nodes gluing them all together.
86 relevant to the narrow spec, and 5 ellipsis nodes gluing them all together.
87
87
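A rough sketch of the split described above — which incoming changesets touch the narrow spec and which are ellipsis placeholders that only connect the graph. The data layout is invented for the example; the revision numbers match the log output further below:

    def classify(incoming, narrow_paths):
        """Split incoming changesets into narrow-relevant ones and ellipsis
        nodes that merely glue the DAG together."""
        narrow = set(narrow_paths)
        relevant, ellipsis = [], []
        for rev, files in incoming:
            (relevant if narrow & set(files) else ellipsis).append(rev)
        return relevant, ellipsis

    incoming = [(5, ['f1']), (6, ['f2']), (7, ['f7']), (8, ['f8']),
                (9, ['f1']), (10, ['f2']), (11, ['f7']), (12, ['f8']),
                (13, ['f10'])]
    relevant, ellipsis = classify(incoming, ['f2', 'f8'])
    assert len(relevant) == 4 and len(ellipsis) == 5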
88 $ hg pull
88 $ hg pull
89 pulling from ssh://user@dummy/master
89 pulling from ssh://user@dummy/master
90 searching for changes
90 searching for changes
91 adding changesets
91 adding changesets
92 adding manifests
92 adding manifests
93 adding file changes
93 adding file changes
94 added 9 changesets with 4 changes to 2 files
94 added 9 changesets with 4 changes to 2 files
95 new changesets *:* (glob)
95 new changesets *:* (glob)
96 (run 'hg update' to get a working copy)
96 (run 'hg update' to get a working copy)
97 $ hg log -T '{rev}: {desc}\n'
97 $ hg log -T '{rev}: {desc}\n'
98 13: Update#2 to f10
98 13: Update#2 to f10
99 12: Update#2 to f8
99 12: Update#2 to f8
100 11: Update#2 to f7
100 11: Update#2 to f7
101 10: Update#2 to f2
101 10: Update#2 to f2
102 9: Update#2 to f1
102 9: Update#2 to f1
103 8: Update#1 to f8
103 8: Update#1 to f8
104 7: Update#1 to f7
104 7: Update#1 to f7
105 6: Update#1 to f2
105 6: Update#1 to f2
106 5: Update#1 to f1
106 5: Update#1 to f1
107 4: Commit f10
107 4: Commit f10
108 3: Commit f8
108 3: Commit f8
109 2: Commit f7
109 2: Commit f7
110 1: Commit f2
110 1: Commit f2
111 0: Commit f1
111 0: Commit f1
112 $ hg update tip
112 $ hg update tip
113 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
113 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
114
114
115 add a change and push it
115 add a change and push it
116
116
117 $ echo "update#3 2" >> f2
117 $ echo "update#3 2" >> f2
118 $ hg commit -m "Update#3 to f2" f2
118 $ hg commit -m "Update#3 to f2" f2
119 $ hg log f2 -T '{rev}: {desc}\n'
119 $ hg log f2 -T '{rev}: {desc}\n'
120 14: Update#3 to f2
120 14: Update#3 to f2
121 10: Update#2 to f2
121 10: Update#2 to f2
122 6: Update#1 to f2
122 6: Update#1 to f2
123 1: Commit f2
123 1: Commit f2
124 $ hg push
124 $ hg push
125 pushing to ssh://user@dummy/master
125 pushing to ssh://user@dummy/master
126 searching for changes
126 searching for changes
127 remote: adding changesets
127 remote: adding changesets
128 remote: adding manifests
128 remote: adding manifests
129 remote: adding file changes
129 remote: adding file changes
130 remote: added 1 changesets with 1 changes to 1 files
130 remote: added 1 changesets with 1 changes to 1 files
131 $ cd ..
131 $ cd ..
132
132
133 $ cd master
133 $ cd master
134 $ hg log f2 -T '{rev}: {desc}\n'
134 $ hg log f2 -T '{rev}: {desc}\n'
135 30: Update#3 to f2
135 30: Update#3 to f2
136 21: Update#2 to f2
136 21: Update#2 to f2
137 11: Update#1 to f2
137 11: Update#1 to f2
138 1: Commit f2
138 1: Commit f2
139 $ hg log -l 3 -T '{rev}: {desc}\n'
139 $ hg log -l 3 -T '{rev}: {desc}\n'
140 30: Update#3 to f2
140 30: Update#3 to f2
141 29: Update#2 to f10
141 29: Update#2 to f10
142 28: Update#2 to f9
142 28: Update#2 to f9
143
143
144 Can pull into repo with a single commit
144 Can pull into repo with a single commit
145
145
146 $ cd ..
146 $ cd ..
147 $ hg clone -q --narrow ssh://user@dummy/master narrow2 --include "f1" -r 0
147 $ hg clone -q --narrow ssh://user@dummy/master narrow2 --include "f1" -r 0
148 $ cd narrow2
148 $ cd narrow2
149 $ hg pull -q -r 1
149 $ hg pull -q -r 1
150 remote: abort: unexpected error: unable to resolve parent while packing '00manifest.i' 1 for changeset 0
150 remote: abort: unexpected error: unable to resolve parent while packing '00manifest.i' 1 for changeset 0
151 transaction abort!
151 transaction abort!
152 rollback completed
152 rollback completed
153 abort: pull failed on remote
153 abort: pull failed on remote
154 [255]
154 [100]
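
The recurring change in these hunks is the expected exit status: failures whose root cause is reported by the remote side ("abort: pull failed on remote", "abort: push failed on remote") now exit with 100, the detailed exit code Mercurial reserves for remote errors, instead of the generic 255. The sketch below is a minimal, self-contained illustration of that kind of error/exit-code mapping; the class and function names are stand-ins, not Mercurial's actual internals.

import sys


class Abort(Exception):
    """Generic abort raised for local failures (exit status 255)."""

    exit_code = 255


class RemoteError(Abort):
    """Abort whose root cause was reported by the remote side (exit 100)."""

    exit_code = 100


def pull_from(url):
    """Stand-in for a pull that fails because the server aborted."""
    raise RemoteError("pull failed on remote")


def main():
    try:
        pull_from("ssh://user@dummy/master")
    except Abort as err:
        sys.stderr.write("abort: %s\n" % err)
        return err.exit_code
    return 0


if __name__ == "__main__":
    sys.exit(main())

Run directly, the script exits with status 100, mirroring the new [100] expectations in the output above.
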
@@ -1,118 +1,118 @@
1 #require no-reposimplestore
1 #require no-reposimplestore
2
2
3 $ . "$TESTDIR/narrow-library.sh"
3 $ . "$TESTDIR/narrow-library.sh"
4
4
5 $ hg init master
5 $ hg init master
6 $ cd master
6 $ cd master
7 $ cat >> .hg/hgrc <<EOF
7 $ cat >> .hg/hgrc <<EOF
8 > [narrow]
8 > [narrow]
9 > serveellipses=True
9 > serveellipses=True
10 > EOF
10 > EOF
11 $ for x in `$TESTDIR/seq.py 10`
11 $ for x in `$TESTDIR/seq.py 10`
12 > do
12 > do
13 > echo $x > "f$x"
13 > echo $x > "f$x"
14 > hg add "f$x"
14 > hg add "f$x"
15 > done
15 > done
16 $ hg commit -m "Add root files"
16 $ hg commit -m "Add root files"
17 $ mkdir d1 d2
17 $ mkdir d1 d2
18 $ for x in `$TESTDIR/seq.py 10`
18 $ for x in `$TESTDIR/seq.py 10`
19 > do
19 > do
20 > echo d1/$x > "d1/f$x"
20 > echo d1/$x > "d1/f$x"
21 > hg add "d1/f$x"
21 > hg add "d1/f$x"
22 > echo d2/$x > "d2/f$x"
22 > echo d2/$x > "d2/f$x"
23 > hg add "d2/f$x"
23 > hg add "d2/f$x"
24 > done
24 > done
25 $ hg commit -m "Add d1 and d2"
25 $ hg commit -m "Add d1 and d2"
26 $ for x in `$TESTDIR/seq.py 10`
26 $ for x in `$TESTDIR/seq.py 10`
27 > do
27 > do
28 > echo f$x rev2 > "f$x"
28 > echo f$x rev2 > "f$x"
29 > echo d1/f$x rev2 > "d1/f$x"
29 > echo d1/f$x rev2 > "d1/f$x"
30 > echo d2/f$x rev2 > "d2/f$x"
30 > echo d2/f$x rev2 > "d2/f$x"
31 > hg commit -m "Commit rev2 of f$x, d1/f$x, d2/f$x"
31 > hg commit -m "Commit rev2 of f$x, d1/f$x, d2/f$x"
32 > done
32 > done
33 $ cd ..
33 $ cd ..
34
34
35 narrow and shallow clone the d2 directory
35 narrow and shallow clone the d2 directory
36
36
37 $ hg clone --narrow ssh://user@dummy/master shallow --include "d2" --depth 2
37 $ hg clone --narrow ssh://user@dummy/master shallow --include "d2" --depth 2
38 requesting all changes
38 requesting all changes
39 adding changesets
39 adding changesets
40 adding manifests
40 adding manifests
41 adding file changes
41 adding file changes
42 added 4 changesets with 13 changes to 10 files
42 added 4 changesets with 13 changes to 10 files
43 new changesets *:* (glob)
43 new changesets *:* (glob)
44 updating to branch default
44 updating to branch default
45 10 files updated, 0 files merged, 0 files removed, 0 files unresolved
45 10 files updated, 0 files merged, 0 files removed, 0 files unresolved
46 $ cd shallow
46 $ cd shallow
47 $ hg log -T '{rev}{if(ellipsis,"...")}: {desc}\n'
47 $ hg log -T '{rev}{if(ellipsis,"...")}: {desc}\n'
48 3: Commit rev2 of f10, d1/f10, d2/f10
48 3: Commit rev2 of f10, d1/f10, d2/f10
49 2: Commit rev2 of f9, d1/f9, d2/f9
49 2: Commit rev2 of f9, d1/f9, d2/f9
50 1: Commit rev2 of f8, d1/f8, d2/f8
50 1: Commit rev2 of f8, d1/f8, d2/f8
51 0...: Commit rev2 of f7, d1/f7, d2/f7
51 0...: Commit rev2 of f7, d1/f7, d2/f7
52 $ hg update 0
52 $ hg update 0
53 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
53 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
54 $ cat d2/f7 d2/f8
54 $ cat d2/f7 d2/f8
55 d2/f7 rev2
55 d2/f7 rev2
56 d2/8
56 d2/8
57
57
58 $ cd ..
58 $ cd ..
59
59
60 change every upstream file once
60 change every upstream file once
61
61
62 $ cd master
62 $ cd master
63 $ for x in `$TESTDIR/seq.py 10`
63 $ for x in `$TESTDIR/seq.py 10`
64 > do
64 > do
65 > echo f$x rev3 > "f$x"
65 > echo f$x rev3 > "f$x"
66 > echo d1/f$x rev3 > "d1/f$x"
66 > echo d1/f$x rev3 > "d1/f$x"
67 > echo d2/f$x rev3 > "d2/f$x"
67 > echo d2/f$x rev3 > "d2/f$x"
68 > hg commit -m "Commit rev3 of f$x, d1/f$x, d2/f$x"
68 > hg commit -m "Commit rev3 of f$x, d1/f$x, d2/f$x"
69 > done
69 > done
70 $ cd ..
70 $ cd ..
71
71
72 pull new changes with --depth specified. There were 10 changes to the d2
72 pull new changes with --depth specified. There were 10 changes to the d2
73 directory but the shallow pull should only fetch 3.
73 directory but the shallow pull should only fetch 3.
74
74
75 $ cd shallow
75 $ cd shallow
76 $ hg pull --depth 2
76 $ hg pull --depth 2
77 pulling from ssh://user@dummy/master
77 pulling from ssh://user@dummy/master
78 searching for changes
78 searching for changes
79 adding changesets
79 adding changesets
80 adding manifests
80 adding manifests
81 adding file changes
81 adding file changes
82 added 4 changesets with 10 changes to 10 files
82 added 4 changesets with 10 changes to 10 files
83 new changesets *:* (glob)
83 new changesets *:* (glob)
84 (run 'hg update' to get a working copy)
84 (run 'hg update' to get a working copy)
85 $ hg log -T '{rev}{if(ellipsis,"...")}: {desc}\n'
85 $ hg log -T '{rev}{if(ellipsis,"...")}: {desc}\n'
86 7: Commit rev3 of f10, d1/f10, d2/f10
86 7: Commit rev3 of f10, d1/f10, d2/f10
87 6: Commit rev3 of f9, d1/f9, d2/f9
87 6: Commit rev3 of f9, d1/f9, d2/f9
88 5: Commit rev3 of f8, d1/f8, d2/f8
88 5: Commit rev3 of f8, d1/f8, d2/f8
89 4...: Commit rev3 of f7, d1/f7, d2/f7
89 4...: Commit rev3 of f7, d1/f7, d2/f7
90 3: Commit rev2 of f10, d1/f10, d2/f10
90 3: Commit rev2 of f10, d1/f10, d2/f10
91 2: Commit rev2 of f9, d1/f9, d2/f9
91 2: Commit rev2 of f9, d1/f9, d2/f9
92 1: Commit rev2 of f8, d1/f8, d2/f8
92 1: Commit rev2 of f8, d1/f8, d2/f8
93 0...: Commit rev2 of f7, d1/f7, d2/f7
93 0...: Commit rev2 of f7, d1/f7, d2/f7
94
94
95 $ hg update 4
95 $ hg update 4
96 10 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 10 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 $ cat d2/f7 d2/f8
97 $ cat d2/f7 d2/f8
98 d2/f7 rev3
98 d2/f7 rev3
99 d2/f8 rev2
99 d2/f8 rev2
100 $ hg update 7
100 $ hg update 7
101 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 $ cat d2/f10
102 $ cat d2/f10
103 d2/f10 rev3
103 d2/f10 rev3
104
104
105 $ cd ..
105 $ cd ..
106
106
107 cannot clone with zero or negative depth
107 cannot clone with zero or negative depth
108
108
109 $ hg clone --narrow ssh://user@dummy/master bad --include "d2" --depth 0
109 $ hg clone --narrow ssh://user@dummy/master bad --include "d2" --depth 0
110 requesting all changes
110 requesting all changes
111 remote: abort: depth must be positive, got 0
111 remote: abort: depth must be positive, got 0
112 abort: pull failed on remote
112 abort: pull failed on remote
113 [255]
113 [100]
114 $ hg clone --narrow ssh://user@dummy/master bad --include "d2" --depth -1
114 $ hg clone --narrow ssh://user@dummy/master bad --include "d2" --depth -1
115 requesting all changes
115 requesting all changes
116 remote: abort: depth must be positive, got -1
116 remote: abort: depth must be positive, got -1
117 abort: pull failed on remote
117 abort: pull failed on remote
118 [255]
118 [100]
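
The two aborts above show the server rejecting non-positive --depth values before any data is exchanged, and the client reporting the refused pull with the remote-error status 100. A minimal sketch of that kind of argument check, with an illustrative helper name rather than the actual wire-protocol code:

def validate_depth(depth):
    """Return depth as an int, rejecting zero or negative values."""
    depth = int(depth)
    if depth <= 0:
        raise ValueError("depth must be positive, got %d" % depth)
    return depth


if __name__ == "__main__":
    for candidate in (2, 0, -1):
        try:
            print("accepted depth", validate_depth(candidate))
        except ValueError as err:
            print("abort:", err)
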
@@ -1,124 +1,124 @@
1 #require no-windows
1 #require no-windows
2
2
3 $ . "$TESTDIR/remotefilelog-library.sh"
3 $ . "$TESTDIR/remotefilelog-library.sh"
4
4
5 $ hg init master
5 $ hg init master
6 $ cd master
6 $ cd master
7 $ echo treemanifest >> .hg/requires
7 $ echo treemanifest >> .hg/requires
8 $ cat >> .hg/hgrc <<EOF
8 $ cat >> .hg/hgrc <<EOF
9 > [remotefilelog]
9 > [remotefilelog]
10 > server=True
10 > server=True
11 > EOF
11 > EOF
12 # uppercase directory name to test encoding
12 # uppercase directory name to test encoding
13 $ mkdir -p A/B
13 $ mkdir -p A/B
14 $ echo x > A/B/x
14 $ echo x > A/B/x
15 $ hg commit -qAm x
15 $ hg commit -qAm x
16
16
17 $ cd ..
17 $ cd ..
18
18
19 # shallow clone from full
19 # shallow clone from full
20
20
21 $ hgcloneshallow ssh://user@dummy/master shallow --noupdate
21 $ hgcloneshallow ssh://user@dummy/master shallow --noupdate
22 streaming all changes
22 streaming all changes
23 4 files to transfer, 449 bytes of data
23 4 files to transfer, 449 bytes of data
24 transferred 449 bytes in * seconds (*/sec) (glob)
24 transferred 449 bytes in * seconds (*/sec) (glob)
25 searching for changes
25 searching for changes
26 no changes found
26 no changes found
27 $ cd shallow
27 $ cd shallow
28 $ cat .hg/requires
28 $ cat .hg/requires
29 dotencode
29 dotencode
30 exp-remotefilelog-repo-req-1
30 exp-remotefilelog-repo-req-1
31 fncache
31 fncache
32 generaldelta
32 generaldelta
33 persistent-nodemap (rust !)
33 persistent-nodemap (rust !)
34 revlog-compression-zstd (zstd !)
34 revlog-compression-zstd (zstd !)
35 revlogv1
35 revlogv1
36 sparserevlog
36 sparserevlog
37 store
37 store
38 treemanifest
38 treemanifest
39 $ find .hg/store/meta | sort
39 $ find .hg/store/meta | sort
40 .hg/store/meta
40 .hg/store/meta
41 .hg/store/meta/_a
41 .hg/store/meta/_a
42 .hg/store/meta/_a/00manifest.i
42 .hg/store/meta/_a/00manifest.i
43 .hg/store/meta/_a/_b
43 .hg/store/meta/_a/_b
44 .hg/store/meta/_a/_b/00manifest.i
44 .hg/store/meta/_a/_b/00manifest.i
45
45
46 $ hg update
46 $ hg update
47 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
47 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
48 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob)
48 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob)
49
49
50 $ cat A/B/x
50 $ cat A/B/x
51 x
51 x
52
52
53 $ ls .hg/store/data
53 $ ls .hg/store/data
54 $ echo foo > A/B/F
54 $ echo foo > A/B/F
55 $ hg add A/B/F
55 $ hg add A/B/F
56 $ hg ci -m 'local content'
56 $ hg ci -m 'local content'
57 $ ls .hg/store/data
57 $ ls .hg/store/data
58 ca31988f085bfb945cb8115b78fabdee40f741aa
58 ca31988f085bfb945cb8115b78fabdee40f741aa
59
59
60 $ cd ..
60 $ cd ..
61
61
62 # shallow clone from shallow
62 # shallow clone from shallow
63
63
64 $ hgcloneshallow ssh://user@dummy/shallow shallow2 --noupdate
64 $ hgcloneshallow ssh://user@dummy/shallow shallow2 --noupdate
65 streaming all changes
65 streaming all changes
66 5 files to transfer, 1008 bytes of data
66 5 files to transfer, 1008 bytes of data
67 transferred 1008 bytes in * seconds (*/sec) (glob)
67 transferred 1008 bytes in * seconds (*/sec) (glob)
68 searching for changes
68 searching for changes
69 no changes found
69 no changes found
70 $ cd shallow2
70 $ cd shallow2
71 $ cat .hg/requires
71 $ cat .hg/requires
72 dotencode
72 dotencode
73 exp-remotefilelog-repo-req-1
73 exp-remotefilelog-repo-req-1
74 fncache
74 fncache
75 generaldelta
75 generaldelta
76 persistent-nodemap (rust !)
76 persistent-nodemap (rust !)
77 revlog-compression-zstd (zstd !)
77 revlog-compression-zstd (zstd !)
78 revlogv1
78 revlogv1
79 sparserevlog
79 sparserevlog
80 store
80 store
81 treemanifest
81 treemanifest
82 $ ls .hg/store/data
82 $ ls .hg/store/data
83 ca31988f085bfb945cb8115b78fabdee40f741aa
83 ca31988f085bfb945cb8115b78fabdee40f741aa
84
84
85 $ hg update
85 $ hg update
86 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
86 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
87
87
88 $ cat A/B/x
88 $ cat A/B/x
89 x
89 x
90
90
91 $ cd ..
91 $ cd ..
92
92
93 # full clone from shallow
93 # full clone from shallow
94 # - send stderr to /dev/null because the order of stdout/err causes
94 # - send stderr to /dev/null because the order of stdout/err causes
95 # flakiness here
95 # flakiness here
96 $ hg clone --noupdate ssh://user@dummy/shallow full 2>/dev/null
96 $ hg clone --noupdate ssh://user@dummy/shallow full 2>/dev/null
97 streaming all changes
97 streaming all changes
98 [255]
98 [100]
99
99
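
The failure just above is a full clone from a shallow remotefilelog server, which is refused with "Cannot clone from a shallow repo to a full repo." and now surfaces on the client as exit status 100. A shallow repository does not hold every file revision, so it cannot satisfy a full clone. The sketch below illustrates that kind of compatibility check with hypothetical names; the real remotefilelog code is more involved.

class ShallowRepoError(Exception):
    pass


def check_clone_compat(server_is_shallow, client_wants_shallow):
    """Refuse clone combinations a shallow server cannot satisfy."""
    if server_is_shallow and not client_wants_shallow:
        raise ShallowRepoError(
            "Cannot clone from a shallow repo to a full repo."
        )


if __name__ == "__main__":
    # shallow -> shallow is fine; shallow -> full is refused
    check_clone_compat(server_is_shallow=True, client_wants_shallow=True)
    try:
        check_clone_compat(server_is_shallow=True, client_wants_shallow=False)
    except ShallowRepoError as err:
        print("remote: abort:", err)
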
100 # getbundle full clone
100 # getbundle full clone
101
101
102 $ printf '[server]\npreferuncompressed=False\n' >> master/.hg/hgrc
102 $ printf '[server]\npreferuncompressed=False\n' >> master/.hg/hgrc
103 $ hgcloneshallow ssh://user@dummy/master shallow3
103 $ hgcloneshallow ssh://user@dummy/master shallow3
104 requesting all changes
104 requesting all changes
105 adding changesets
105 adding changesets
106 adding manifests
106 adding manifests
107 adding file changes
107 adding file changes
108 added 1 changesets with 0 changes to 0 files
108 added 1 changesets with 0 changes to 0 files
109 new changesets 18d955ee7ba0
109 new changesets 18d955ee7ba0
110 updating to branch default
110 updating to branch default
111 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
112
112
113 $ ls shallow3/.hg/store/data
113 $ ls shallow3/.hg/store/data
114 $ cat shallow3/.hg/requires
114 $ cat shallow3/.hg/requires
115 dotencode
115 dotencode
116 exp-remotefilelog-repo-req-1
116 exp-remotefilelog-repo-req-1
117 fncache
117 fncache
118 generaldelta
118 generaldelta
119 persistent-nodemap (rust !)
119 persistent-nodemap (rust !)
120 revlog-compression-zstd (zstd !)
120 revlog-compression-zstd (zstd !)
121 revlogv1
121 revlogv1
122 sparserevlog
122 sparserevlog
123 store
123 store
124 treemanifest
124 treemanifest
@@ -1,121 +1,121 @@
1 #require no-windows
1 #require no-windows
2
2
3 $ . "$TESTDIR/remotefilelog-library.sh"
3 $ . "$TESTDIR/remotefilelog-library.sh"
4
4
5 $ hg init master
5 $ hg init master
6 $ cd master
6 $ cd master
7 $ cat >> .hg/hgrc <<EOF
7 $ cat >> .hg/hgrc <<EOF
8 > [remotefilelog]
8 > [remotefilelog]
9 > server=True
9 > server=True
10 > EOF
10 > EOF
11 $ echo x > x
11 $ echo x > x
12 $ hg commit -qAm x
12 $ hg commit -qAm x
13
13
14 $ cd ..
14 $ cd ..
15
15
16 # shallow clone from full
16 # shallow clone from full
17
17
18 $ hgcloneshallow ssh://user@dummy/master shallow --noupdate
18 $ hgcloneshallow ssh://user@dummy/master shallow --noupdate
19 streaming all changes
19 streaming all changes
20 2 files to transfer, 227 bytes of data
20 2 files to transfer, 227 bytes of data
21 transferred 227 bytes in * seconds (*/sec) (glob)
21 transferred 227 bytes in * seconds (*/sec) (glob)
22 searching for changes
22 searching for changes
23 no changes found
23 no changes found
24 $ cd shallow
24 $ cd shallow
25 $ cat .hg/requires
25 $ cat .hg/requires
26 dotencode
26 dotencode
27 exp-remotefilelog-repo-req-1
27 exp-remotefilelog-repo-req-1
28 fncache
28 fncache
29 generaldelta
29 generaldelta
30 persistent-nodemap (rust !)
30 persistent-nodemap (rust !)
31 revlog-compression-zstd (zstd !)
31 revlog-compression-zstd (zstd !)
32 revlogv1
32 revlogv1
33 sparserevlog
33 sparserevlog
34 store
34 store
35
35
36 $ hg update
36 $ hg update
37 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
37 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
38 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob)
38 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob)
39
39
40 $ cat x
40 $ cat x
41 x
41 x
42
42
43 $ ls .hg/store/data
43 $ ls .hg/store/data
44 $ echo foo > f
44 $ echo foo > f
45 $ hg add f
45 $ hg add f
46 $ hg ci -m 'local content'
46 $ hg ci -m 'local content'
47 $ ls .hg/store/data
47 $ ls .hg/store/data
48 4a0a19218e082a343a1b17e5333409af9d98f0f5
48 4a0a19218e082a343a1b17e5333409af9d98f0f5
49
49
50 $ cd ..
50 $ cd ..
51
51
52 # shallow clone from shallow
52 # shallow clone from shallow
53
53
54 $ hgcloneshallow ssh://user@dummy/shallow shallow2 --noupdate
54 $ hgcloneshallow ssh://user@dummy/shallow shallow2 --noupdate
55 streaming all changes
55 streaming all changes
56 3 files to transfer, 564 bytes of data
56 3 files to transfer, 564 bytes of data
57 transferred 564 bytes in * seconds (*/sec) (glob)
57 transferred 564 bytes in * seconds (*/sec) (glob)
58 searching for changes
58 searching for changes
59 no changes found
59 no changes found
60 $ cd shallow2
60 $ cd shallow2
61 $ cat .hg/requires
61 $ cat .hg/requires
62 dotencode
62 dotencode
63 exp-remotefilelog-repo-req-1
63 exp-remotefilelog-repo-req-1
64 fncache
64 fncache
65 generaldelta
65 generaldelta
66 persistent-nodemap (rust !)
66 persistent-nodemap (rust !)
67 revlog-compression-zstd (zstd !)
67 revlog-compression-zstd (zstd !)
68 revlogv1
68 revlogv1
69 sparserevlog
69 sparserevlog
70 store
70 store
71 $ ls .hg/store/data
71 $ ls .hg/store/data
72 4a0a19218e082a343a1b17e5333409af9d98f0f5
72 4a0a19218e082a343a1b17e5333409af9d98f0f5
73
73
74 $ hg update
74 $ hg update
75 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
75 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
76
76
77 $ cat x
77 $ cat x
78 x
78 x
79
79
80 $ cd ..
80 $ cd ..
81
81
82 # full clone from shallow
82 # full clone from shallow
83
83
84 Note: the output to STDERR comes from a different process to the output on
84 Note: the output to STDERR comes from a different process to the output on
85 STDOUT and their relative ordering is not deterministic. As a result, the test
85 STDOUT and their relative ordering is not deterministic. As a result, the test
86 was failing sporadically. To avoid this, we capture STDERR to a file and
86 was failing sporadically. To avoid this, we capture STDERR to a file and
87 check its contents separately.
87 check its contents separately.
88
88
89 $ TEMP_STDERR=full-clone-from-shallow.stderr.tmp
89 $ TEMP_STDERR=full-clone-from-shallow.stderr.tmp
90 $ hg clone --noupdate ssh://user@dummy/shallow full 2>$TEMP_STDERR
90 $ hg clone --noupdate ssh://user@dummy/shallow full 2>$TEMP_STDERR
91 streaming all changes
91 streaming all changes
92 [255]
92 [100]
93 $ cat $TEMP_STDERR
93 $ cat $TEMP_STDERR
94 remote: abort: Cannot clone from a shallow repo to a full repo.
94 remote: abort: Cannot clone from a shallow repo to a full repo.
95 abort: pull failed on remote
95 abort: pull failed on remote
96 $ rm $TEMP_STDERR
96 $ rm $TEMP_STDERR
97
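
The note above spells out the workaround used here: remote stderr comes from a different process than local stdout, their relative ordering is nondeterministic, and the test therefore captures stderr to a file and checks it separately. The same idea in a standalone Python sketch (the command is only an example; the test itself does this with plain shell redirection):

import subprocess
import tempfile


def run_and_split(cmd):
    """Run cmd, returning (status, stdout, stderr), with stderr captured
    via a temporary file instead of an interleaved stream."""
    with tempfile.TemporaryFile() as errfile:
        proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=errfile)
        errfile.seek(0)
        return proc.returncode, proc.stdout, errfile.read()


if __name__ == "__main__":
    status, out, err = run_and_split(["hg", "version"])
    print("exit status:", status)
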
97
98 # getbundle full clone
98 # getbundle full clone
99
99
100 $ printf '[server]\npreferuncompressed=False\n' >> master/.hg/hgrc
100 $ printf '[server]\npreferuncompressed=False\n' >> master/.hg/hgrc
101 $ hgcloneshallow ssh://user@dummy/master shallow3
101 $ hgcloneshallow ssh://user@dummy/master shallow3
102 requesting all changes
102 requesting all changes
103 adding changesets
103 adding changesets
104 adding manifests
104 adding manifests
105 adding file changes
105 adding file changes
106 added 1 changesets with 0 changes to 0 files
106 added 1 changesets with 0 changes to 0 files
107 new changesets b292c1e3311f
107 new changesets b292c1e3311f
108 updating to branch default
108 updating to branch default
109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110
110
111 $ ls shallow3/.hg/store/data
111 $ ls shallow3/.hg/store/data
112 $ cat shallow3/.hg/requires
112 $ cat shallow3/.hg/requires
113 dotencode
113 dotencode
114 exp-remotefilelog-repo-req-1
114 exp-remotefilelog-repo-req-1
115 fncache
115 fncache
116 generaldelta
116 generaldelta
117 persistent-nodemap (rust !)
117 persistent-nodemap (rust !)
118 revlog-compression-zstd (zstd !)
118 revlog-compression-zstd (zstd !)
119 revlogv1
119 revlogv1
120 sparserevlog
120 sparserevlog
121 store
121 store
@@ -1,733 +1,733 @@
1 #testcases sshv1 sshv2
1 #testcases sshv1 sshv2
2
2
3 #if sshv2
3 #if sshv2
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > sshpeer.advertise-v2 = true
6 > sshpeer.advertise-v2 = true
7 > sshserver.support-v2 = true
7 > sshserver.support-v2 = true
8 > EOF
8 > EOF
9 #endif
9 #endif
10
10
11 This test tries to exercise the ssh functionality with a dummy script
11 This test tries to exercise the ssh functionality with a dummy script
12
12
13 creating 'remote' repo
13 creating 'remote' repo
14
14
15 $ hg init remote
15 $ hg init remote
16 $ cd remote
16 $ cd remote
17 $ echo this > foo
17 $ echo this > foo
18 $ echo this > fooO
18 $ echo this > fooO
19 $ hg ci -A -m "init" foo fooO
19 $ hg ci -A -m "init" foo fooO
20
20
21 insert a closed branch (issue4428)
21 insert a closed branch (issue4428)
22
22
23 $ hg up null
23 $ hg up null
24 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
24 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
25 $ hg branch closed
25 $ hg branch closed
26 marked working directory as branch closed
26 marked working directory as branch closed
27 (branches are permanent and global, did you want a bookmark?)
27 (branches are permanent and global, did you want a bookmark?)
28 $ hg ci -mc0
28 $ hg ci -mc0
29 $ hg ci --close-branch -mc1
29 $ hg ci --close-branch -mc1
30 $ hg up -q default
30 $ hg up -q default
31
31
32 configure for serving
32 configure for serving
33
33
34 $ cat <<EOF > .hg/hgrc
34 $ cat <<EOF > .hg/hgrc
35 > [server]
35 > [server]
36 > uncompressed = True
36 > uncompressed = True
37 >
37 >
38 > [hooks]
38 > [hooks]
39 > changegroup = sh -c "printenv.py --line changegroup-in-remote 0 ../dummylog"
39 > changegroup = sh -c "printenv.py --line changegroup-in-remote 0 ../dummylog"
40 > EOF
40 > EOF
41 $ cd $TESTTMP
41 $ cd $TESTTMP
42
42
43 repo not found error
43 repo not found error
44
44
45 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
45 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
46 remote: abort: repository nonexistent not found
46 remote: abort: repository nonexistent not found
47 abort: no suitable response from remote hg
47 abort: no suitable response from remote hg
48 [255]
48 [255]
49 $ hg clone -q -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
49 $ hg clone -q -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
50 remote: abort: repository nonexistent not found
50 remote: abort: repository nonexistent not found
51 abort: no suitable response from remote hg
51 abort: no suitable response from remote hg
52 [255]
52 [255]
53
53
54 non-existent absolute path
54 non-existent absolute path
55
55
56 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
56 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
57 remote: abort: repository $TESTTMP/nonexistent not found
57 remote: abort: repository $TESTTMP/nonexistent not found
58 abort: no suitable response from remote hg
58 abort: no suitable response from remote hg
59 [255]
59 [255]
60
60
61 clone remote via stream
61 clone remote via stream
62
62
63 #if no-reposimplestore
63 #if no-reposimplestore
64
64
65 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/remote local-stream
65 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/remote local-stream
66 streaming all changes
66 streaming all changes
67 8 files to transfer, 827 bytes of data (no-zstd !)
67 8 files to transfer, 827 bytes of data (no-zstd !)
68 transferred 827 bytes in * seconds (*) (glob) (no-zstd !)
68 transferred 827 bytes in * seconds (*) (glob) (no-zstd !)
69 8 files to transfer, 846 bytes of data (zstd !)
69 8 files to transfer, 846 bytes of data (zstd !)
70 transferred * bytes in * seconds (* */sec) (glob) (zstd !)
70 transferred * bytes in * seconds (* */sec) (glob) (zstd !)
71 updating to branch default
71 updating to branch default
72 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
72 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
73 $ cd local-stream
73 $ cd local-stream
74 $ hg verify
74 $ hg verify
75 checking changesets
75 checking changesets
76 checking manifests
76 checking manifests
77 crosschecking files in changesets and manifests
77 crosschecking files in changesets and manifests
78 checking files
78 checking files
79 checked 3 changesets with 2 changes to 2 files
79 checked 3 changesets with 2 changes to 2 files
80 $ hg branches
80 $ hg branches
81 default 0:1160648e36ce
81 default 0:1160648e36ce
82 $ cd $TESTTMP
82 $ cd $TESTTMP
83
83
84 clone bookmarks via stream
84 clone bookmarks via stream
85
85
86 $ hg -R local-stream book mybook
86 $ hg -R local-stream book mybook
87 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/local-stream stream2
87 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/local-stream stream2
88 streaming all changes
88 streaming all changes
89 16 files to transfer, * of data (glob)
89 16 files to transfer, * of data (glob)
90 transferred * in * seconds (*) (glob)
90 transferred * in * seconds (*) (glob)
91 updating to branch default
91 updating to branch default
92 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
92 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
93 $ cd stream2
93 $ cd stream2
94 $ hg book
94 $ hg book
95 mybook 0:1160648e36ce
95 mybook 0:1160648e36ce
96 $ cd $TESTTMP
96 $ cd $TESTTMP
97 $ rm -rf local-stream stream2
97 $ rm -rf local-stream stream2
98
98
99 #endif
99 #endif
100
100
101 clone remote via pull
101 clone remote via pull
102
102
103 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
103 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
104 requesting all changes
104 requesting all changes
105 adding changesets
105 adding changesets
106 adding manifests
106 adding manifests
107 adding file changes
107 adding file changes
108 added 3 changesets with 2 changes to 2 files
108 added 3 changesets with 2 changes to 2 files
109 new changesets 1160648e36ce:ad076bfb429d
109 new changesets 1160648e36ce:ad076bfb429d
110 updating to branch default
110 updating to branch default
111 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
112
112
113 verify
113 verify
114
114
115 $ cd local
115 $ cd local
116 $ hg verify
116 $ hg verify
117 checking changesets
117 checking changesets
118 checking manifests
118 checking manifests
119 crosschecking files in changesets and manifests
119 crosschecking files in changesets and manifests
120 checking files
120 checking files
121 checked 3 changesets with 2 changes to 2 files
121 checked 3 changesets with 2 changes to 2 files
122 $ cat >> .hg/hgrc <<EOF
122 $ cat >> .hg/hgrc <<EOF
123 > [hooks]
123 > [hooks]
124 > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
124 > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
125 > EOF
125 > EOF
126
126
127 empty default pull
127 empty default pull
128
128
129 $ hg paths
129 $ hg paths
130 default = ssh://user@dummy/remote
130 default = ssh://user@dummy/remote
131 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
131 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
132 pulling from ssh://user@dummy/remote
132 pulling from ssh://user@dummy/remote
133 searching for changes
133 searching for changes
134 no changes found
134 no changes found
135
135
136 pull from wrong ssh URL
136 pull from wrong ssh URL
137
137
138 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
138 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
139 pulling from ssh://user@dummy/doesnotexist
139 pulling from ssh://user@dummy/doesnotexist
140 remote: abort: repository doesnotexist not found
140 remote: abort: repository doesnotexist not found
141 abort: no suitable response from remote hg
141 abort: no suitable response from remote hg
142 [255]
142 [255]
143
143
144 local change
144 local change
145
145
146 $ echo bleah > foo
146 $ echo bleah > foo
147 $ hg ci -m "add"
147 $ hg ci -m "add"
148
148
149 updating rc
149 updating rc
150
150
151 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
151 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
152 $ echo "[ui]" >> .hg/hgrc
152 $ echo "[ui]" >> .hg/hgrc
153 $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
153 $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
154
154
155 find outgoing
155 find outgoing
156
156
157 $ hg out ssh://user@dummy/remote
157 $ hg out ssh://user@dummy/remote
158 comparing with ssh://user@dummy/remote
158 comparing with ssh://user@dummy/remote
159 searching for changes
159 searching for changes
160 changeset: 3:a28a9d1a809c
160 changeset: 3:a28a9d1a809c
161 tag: tip
161 tag: tip
162 parent: 0:1160648e36ce
162 parent: 0:1160648e36ce
163 user: test
163 user: test
164 date: Thu Jan 01 00:00:00 1970 +0000
164 date: Thu Jan 01 00:00:00 1970 +0000
165 summary: add
165 summary: add
166
166
167
167
168 find incoming on the remote side
168 find incoming on the remote side
169
169
170 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
170 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
171 comparing with ssh://user@dummy/local
171 comparing with ssh://user@dummy/local
172 searching for changes
172 searching for changes
173 changeset: 3:a28a9d1a809c
173 changeset: 3:a28a9d1a809c
174 tag: tip
174 tag: tip
175 parent: 0:1160648e36ce
175 parent: 0:1160648e36ce
176 user: test
176 user: test
177 date: Thu Jan 01 00:00:00 1970 +0000
177 date: Thu Jan 01 00:00:00 1970 +0000
178 summary: add
178 summary: add
179
179
180
180
181 find incoming on the remote side (using absolute path)
181 find incoming on the remote side (using absolute path)
182
182
183 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
183 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
184 comparing with ssh://user@dummy/$TESTTMP/local
184 comparing with ssh://user@dummy/$TESTTMP/local
185 searching for changes
185 searching for changes
186 changeset: 3:a28a9d1a809c
186 changeset: 3:a28a9d1a809c
187 tag: tip
187 tag: tip
188 parent: 0:1160648e36ce
188 parent: 0:1160648e36ce
189 user: test
189 user: test
190 date: Thu Jan 01 00:00:00 1970 +0000
190 date: Thu Jan 01 00:00:00 1970 +0000
191 summary: add
191 summary: add
192
192
193
193
194 push
194 push
195
195
196 $ hg push
196 $ hg push
197 pushing to ssh://user@dummy/remote
197 pushing to ssh://user@dummy/remote
198 searching for changes
198 searching for changes
199 remote: adding changesets
199 remote: adding changesets
200 remote: adding manifests
200 remote: adding manifests
201 remote: adding file changes
201 remote: adding file changes
202 remote: added 1 changesets with 1 changes to 1 files
202 remote: added 1 changesets with 1 changes to 1 files
203 $ cd $TESTTMP/remote
203 $ cd $TESTTMP/remote
204
204
205 check remote tip
205 check remote tip
206
206
207 $ hg tip
207 $ hg tip
208 changeset: 3:a28a9d1a809c
208 changeset: 3:a28a9d1a809c
209 tag: tip
209 tag: tip
210 parent: 0:1160648e36ce
210 parent: 0:1160648e36ce
211 user: test
211 user: test
212 date: Thu Jan 01 00:00:00 1970 +0000
212 date: Thu Jan 01 00:00:00 1970 +0000
213 summary: add
213 summary: add
214
214
215 $ hg verify
215 $ hg verify
216 checking changesets
216 checking changesets
217 checking manifests
217 checking manifests
218 crosschecking files in changesets and manifests
218 crosschecking files in changesets and manifests
219 checking files
219 checking files
220 checked 4 changesets with 3 changes to 2 files
220 checked 4 changesets with 3 changes to 2 files
221 $ hg cat -r tip foo
221 $ hg cat -r tip foo
222 bleah
222 bleah
223 $ echo z > z
223 $ echo z > z
224 $ hg ci -A -m z z
224 $ hg ci -A -m z z
225 created new head
225 created new head
226
226
227 test pushkeys and bookmarks
227 test pushkeys and bookmarks
228
228
229 $ cd $TESTTMP/local
229 $ cd $TESTTMP/local
230 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
230 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
231 bookmarks
231 bookmarks
232 namespaces
232 namespaces
233 phases
233 phases
234 $ hg book foo -r 0
234 $ hg book foo -r 0
235 $ hg out -B --config paths.default=bogus://invalid --config paths.default:pushurl=`hg paths default`
235 $ hg out -B --config paths.default=bogus://invalid --config paths.default:pushurl=`hg paths default`
236 comparing with ssh://user@dummy/remote
236 comparing with ssh://user@dummy/remote
237 searching for changed bookmarks
237 searching for changed bookmarks
238 foo 1160648e36ce
238 foo 1160648e36ce
239 $ hg push -B foo
239 $ hg push -B foo
240 pushing to ssh://user@dummy/remote
240 pushing to ssh://user@dummy/remote
241 searching for changes
241 searching for changes
242 no changes found
242 no changes found
243 exporting bookmark foo
243 exporting bookmark foo
244 [1]
244 [1]
245 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
245 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
246 foo 1160648e36cec0054048a7edc4110c6f84fde594
246 foo 1160648e36cec0054048a7edc4110c6f84fde594
247 $ hg book -f foo
247 $ hg book -f foo
248 $ hg push --traceback
248 $ hg push --traceback
249 pushing to ssh://user@dummy/remote
249 pushing to ssh://user@dummy/remote
250 searching for changes
250 searching for changes
251 no changes found
251 no changes found
252 updating bookmark foo
252 updating bookmark foo
253 [1]
253 [1]
254 $ hg book -d foo
254 $ hg book -d foo
255 $ hg in -B
255 $ hg in -B
256 comparing with ssh://user@dummy/remote
256 comparing with ssh://user@dummy/remote
257 searching for changed bookmarks
257 searching for changed bookmarks
258 foo a28a9d1a809c
258 foo a28a9d1a809c
259 $ hg book -f -r 0 foo
259 $ hg book -f -r 0 foo
260 $ hg pull -B foo
260 $ hg pull -B foo
261 pulling from ssh://user@dummy/remote
261 pulling from ssh://user@dummy/remote
262 no changes found
262 no changes found
263 updating bookmark foo
263 updating bookmark foo
264 $ hg book -d foo
264 $ hg book -d foo
265 $ hg push -B foo
265 $ hg push -B foo
266 pushing to ssh://user@dummy/remote
266 pushing to ssh://user@dummy/remote
267 searching for changes
267 searching for changes
268 no changes found
268 no changes found
269 deleting remote bookmark foo
269 deleting remote bookmark foo
270 [1]
270 [1]
271
271
272 a bad, evil hook that prints to stdout
272 a bad, evil hook that prints to stdout
273
273
274 $ cat <<EOF > $TESTTMP/badhook
274 $ cat <<EOF > $TESTTMP/badhook
275 > import sys
275 > import sys
276 > sys.stdout.write("KABOOM\n")
276 > sys.stdout.write("KABOOM\n")
277 > sys.stdout.flush()
277 > sys.stdout.flush()
278 > EOF
278 > EOF
279
279
280 $ cat <<EOF > $TESTTMP/badpyhook.py
280 $ cat <<EOF > $TESTTMP/badpyhook.py
281 > import sys
281 > import sys
282 > def hook(ui, repo, hooktype, **kwargs):
282 > def hook(ui, repo, hooktype, **kwargs):
283 > sys.stdout.write("KABOOM IN PROCESS\n")
283 > sys.stdout.write("KABOOM IN PROCESS\n")
284 > sys.stdout.flush()
284 > sys.stdout.flush()
285 > EOF
285 > EOF
286
286
287 $ cat <<EOF >> ../remote/.hg/hgrc
287 $ cat <<EOF >> ../remote/.hg/hgrc
288 > [hooks]
288 > [hooks]
289 > changegroup.stdout = "$PYTHON" $TESTTMP/badhook
289 > changegroup.stdout = "$PYTHON" $TESTTMP/badhook
290 > changegroup.pystdout = python:$TESTTMP/badpyhook.py:hook
290 > changegroup.pystdout = python:$TESTTMP/badpyhook.py:hook
291 > EOF
291 > EOF
292 $ echo r > r
292 $ echo r > r
293 $ hg ci -A -m z r
293 $ hg ci -A -m z r
294
294
295 push should succeed even though it has an unexpected response
295 push should succeed even though it has an unexpected response
296
296
297 $ hg push
297 $ hg push
298 pushing to ssh://user@dummy/remote
298 pushing to ssh://user@dummy/remote
299 searching for changes
299 searching for changes
300 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
300 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
301 remote: adding changesets
301 remote: adding changesets
302 remote: adding manifests
302 remote: adding manifests
303 remote: adding file changes
303 remote: adding file changes
304 remote: added 1 changesets with 1 changes to 1 files
304 remote: added 1 changesets with 1 changes to 1 files
305 remote: KABOOM
305 remote: KABOOM
306 remote: KABOOM IN PROCESS
306 remote: KABOOM IN PROCESS
307 $ hg -R ../remote heads
307 $ hg -R ../remote heads
308 changeset: 5:1383141674ec
308 changeset: 5:1383141674ec
309 tag: tip
309 tag: tip
310 parent: 3:a28a9d1a809c
310 parent: 3:a28a9d1a809c
311 user: test
311 user: test
312 date: Thu Jan 01 00:00:00 1970 +0000
312 date: Thu Jan 01 00:00:00 1970 +0000
313 summary: z
313 summary: z
314
314
315 changeset: 4:6c0482d977a3
315 changeset: 4:6c0482d977a3
316 parent: 0:1160648e36ce
316 parent: 0:1160648e36ce
317 user: test
317 user: test
318 date: Thu Jan 01 00:00:00 1970 +0000
318 date: Thu Jan 01 00:00:00 1970 +0000
319 summary: z
319 summary: z
320
320
321
321
322 #if chg
322 #if chg
323
323
324 try again with remote chg, which should succeed as well
324 try again with remote chg, which should succeed as well
325
325
326 $ hg rollback -R ../remote
326 $ hg rollback -R ../remote
327 repository tip rolled back to revision 4 (undo serve)
327 repository tip rolled back to revision 4 (undo serve)
328
328
329 $ hg push --config ui.remotecmd=chg
329 $ hg push --config ui.remotecmd=chg
330 pushing to ssh://user@dummy/remote
330 pushing to ssh://user@dummy/remote
331 searching for changes
331 searching for changes
332 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
332 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
333 remote: adding changesets
333 remote: adding changesets
334 remote: adding manifests
334 remote: adding manifests
335 remote: adding file changes
335 remote: adding file changes
336 remote: added 1 changesets with 1 changes to 1 files (py3 !)
336 remote: added 1 changesets with 1 changes to 1 files (py3 !)
337 remote: KABOOM
337 remote: KABOOM
338 remote: KABOOM IN PROCESS
338 remote: KABOOM IN PROCESS
339 remote: added 1 changesets with 1 changes to 1 files (no-py3 !)
339 remote: added 1 changesets with 1 changes to 1 files (no-py3 !)
340
340
341 #endif
341 #endif
342
342
343 clone bookmarks
343 clone bookmarks
344
344
345 $ hg -R ../remote bookmark test
345 $ hg -R ../remote bookmark test
346 $ hg -R ../remote bookmarks
346 $ hg -R ../remote bookmarks
347 * test 4:6c0482d977a3
347 * test 4:6c0482d977a3
348 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
348 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
349 requesting all changes
349 requesting all changes
350 adding changesets
350 adding changesets
351 adding manifests
351 adding manifests
352 adding file changes
352 adding file changes
353 added 6 changesets with 5 changes to 4 files (+1 heads)
353 added 6 changesets with 5 changes to 4 files (+1 heads)
354 new changesets 1160648e36ce:1383141674ec
354 new changesets 1160648e36ce:1383141674ec
355 updating to branch default
355 updating to branch default
356 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
356 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
357 $ hg -R local-bookmarks bookmarks
357 $ hg -R local-bookmarks bookmarks
358 test 4:6c0482d977a3
358 test 4:6c0482d977a3
359
359
360 passwords in ssh urls are not supported
360 passwords in ssh urls are not supported
361 (we use a glob here because different Python versions give different
361 (we use a glob here because different Python versions give different
362 results here)
362 results here)
363
363
364 $ hg push ssh://user:erroneouspwd@dummy/remote
364 $ hg push ssh://user:erroneouspwd@dummy/remote
365 pushing to ssh://user:*@dummy/remote (glob)
365 pushing to ssh://user:*@dummy/remote (glob)
366 abort: password in URL not supported
366 abort: password in URL not supported
367 [255]
367 [255]
368
368
369 $ cd $TESTTMP
369 $ cd $TESTTMP
370
370
371 hide outer repo
371 hide outer repo
372 $ hg init
372 $ hg init
373
373
374 Test remote paths with spaces (issue2983):
374 Test remote paths with spaces (issue2983):
375
375
376 $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
376 $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
377 $ touch "$TESTTMP/a repo/test"
377 $ touch "$TESTTMP/a repo/test"
378 $ hg -R 'a repo' commit -A -m "test"
378 $ hg -R 'a repo' commit -A -m "test"
379 adding test
379 adding test
380 $ hg -R 'a repo' tag tag
380 $ hg -R 'a repo' tag tag
381 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
381 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
382 73649e48688a
382 73649e48688a
383
383
384 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
384 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
385 abort: unknown revision 'noNoNO'
385 abort: unknown revision 'noNoNO'
386 [255]
386 [255]
387
387
388 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
388 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
389
389
390 $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
390 $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
391 destination directory: a repo
391 destination directory: a repo
392 abort: destination 'a repo' is not empty
392 abort: destination 'a repo' is not empty
393 [10]
393 [10]
394
394
395 #if no-rhg
395 #if no-rhg
396 Make sure hg is really paranoid in serve --stdio mode. It used to be
396 Make sure hg is really paranoid in serve --stdio mode. It used to be
397 possible to get a debugger REPL by specifying a repo named --debugger.
397 possible to get a debugger REPL by specifying a repo named --debugger.
398 $ hg -R --debugger serve --stdio
398 $ hg -R --debugger serve --stdio
399 abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio']
399 abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio']
400 [255]
400 [255]
401 $ hg -R --config=ui.debugger=yes serve --stdio
401 $ hg -R --config=ui.debugger=yes serve --stdio
402 abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio']
402 abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio']
403 [255]
403 [255]
404 Abbreviations of 'serve' also don't work, to avoid shenanigans.
404 Abbreviations of 'serve' also don't work, to avoid shenanigans.
405 $ hg -R narf serv --stdio
405 $ hg -R narf serv --stdio
406 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio']
406 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio']
407 [255]
407 [255]
408 #else
408 #else
409 rhg aborts early on -R without a repository at that path
409 rhg aborts early on -R without a repository at that path
410 $ hg -R --debugger serve --stdio
410 $ hg -R --debugger serve --stdio
411 abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio'] (missing-correct-output !)
411 abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio'] (missing-correct-output !)
412 abort: repository --debugger not found (known-bad-output !)
412 abort: repository --debugger not found (known-bad-output !)
413 [255]
413 [255]
414 $ hg -R --config=ui.debugger=yes serve --stdio
414 $ hg -R --config=ui.debugger=yes serve --stdio
415 abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio'] (missing-correct-output !)
415 abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio'] (missing-correct-output !)
416 abort: repository --config=ui.debugger=yes not found (known-bad-output !)
416 abort: repository --config=ui.debugger=yes not found (known-bad-output !)
417 [255]
417 [255]
418 $ hg -R narf serv --stdio
418 $ hg -R narf serv --stdio
419 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] (missing-correct-output !)
419 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] (missing-correct-output !)
420 abort: repository narf not found (known-bad-output !)
420 abort: repository narf not found (known-bad-output !)
421 [255]
421 [255]
422 If the repo does exist, rhg finds an unsupported command and falls back to Python
422 If the repo does exist, rhg finds an unsupported command and falls back to Python
423 which still does the right thing
423 which still does the right thing
424 $ hg init narf
424 $ hg init narf
425 $ hg -R narf serv --stdio
425 $ hg -R narf serv --stdio
426 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio']
426 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio']
427 [255]
427 [255]
428 #endif
428 #endif
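
The conditional block that just closed documents hg's paranoia about serve --stdio invocations: only a plain '-R <repo> serve --stdio' is accepted, and anything option-shaped in the repository slot (such as --debugger or --config=...) or an abbreviated command name (serv) is rejected with status 255. A hedged sketch of that kind of strict shape check follows; the real validation inside hg is more involved, and this is only an illustration.

import sys


def unsafe_serve_stdio(args):
    """Return True unless args is exactly ['-R', repo, 'serve', '--stdio']."""
    if len(args) != 4:
        return True
    if args[0] != "-R" or args[2:] != ["serve", "--stdio"]:
        return True
    # A repository "name" starting with '-' could be re-parsed as an option
    # (for example --debugger), so refuse it outright.
    return args[1].startswith("-")


if __name__ == "__main__":
    argv = sys.argv[1:]
    if unsafe_serve_stdio(argv):
        sys.stderr.write(
            "abort: potentially unsafe serve --stdio invocation: %r\n" % argv
        )
        sys.exit(255)
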
429
429
430 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
430 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
431 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
431 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
432 parameters:
432 parameters:
433
433
434 $ cat > ssh.sh << EOF
434 $ cat > ssh.sh << EOF
435 > userhost="\$1"
435 > userhost="\$1"
436 > SSH_ORIGINAL_COMMAND="\$2"
436 > SSH_ORIGINAL_COMMAND="\$2"
437 > export SSH_ORIGINAL_COMMAND
437 > export SSH_ORIGINAL_COMMAND
438 > PYTHONPATH="$PYTHONPATH"
438 > PYTHONPATH="$PYTHONPATH"
439 > export PYTHONPATH
439 > export PYTHONPATH
440 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
440 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
441 > EOF
441 > EOF
442
442
443 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
443 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
444 73649e48688a
444 73649e48688a
445
445
446 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
446 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
447 remote: Illegal repository "$TESTTMP/a'repo"
447 remote: Illegal repository "$TESTTMP/a'repo"
448 abort: no suitable response from remote hg
448 abort: no suitable response from remote hg
449 [255]
449 [255]
450
450
451 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
451 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
452 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
452 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
453 abort: no suitable response from remote hg
453 abort: no suitable response from remote hg
454 [255]
454 [255]
455
455
456 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" "$PYTHON" "$TESTDIR/../contrib/hg-ssh"
456 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" "$PYTHON" "$TESTDIR/../contrib/hg-ssh"
457 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
457 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
458 [255]
458 [255]
459
459
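
The hg-ssh checks above distinguish two failure modes: a forced command whose quoting cannot even be parsed ("No closing quotation"), and a parsed command that either does not have the expected 'hg -R <repo> serve --stdio' shape or names a repository outside the allowed set. A sketch of that kind of SSH_ORIGINAL_COMMAND validation using shell-style splitting; this is an illustration, not the actual contrib/hg-ssh script, and the permitted path is a placeholder.

import os
import shlex
import sys


def fail(message):
    sys.stderr.write(message + "\n")
    sys.exit(255)


def allowed_repo(command, permitted):
    """Return the repository named by command if the command is legal."""
    try:
        args = shlex.split(command)
    except ValueError as err:  # e.g. "No closing quotation"
        fail('Illegal command "%s": %s' % (command, err))
    if (
        len(args) != 5
        or args[0] != "hg"
        or args[1] != "-R"
        or args[3:] != ["serve", "--stdio"]
    ):
        fail('Illegal command "%s"' % command)
    repo = args[2]
    if repo not in permitted:
        fail('Illegal repository "%s"' % repo)
    return repo


if __name__ == "__main__":
    cmd = os.environ.get("SSH_ORIGINAL_COMMAND", "")
    print(allowed_repo(cmd, permitted={"/path/to/a repo"}))
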
460 Test hg-ssh in read-only mode:
460 Test hg-ssh in read-only mode:
461
461
462 $ cat > ssh.sh << EOF
462 $ cat > ssh.sh << EOF
463 > userhost="\$1"
463 > userhost="\$1"
464 > SSH_ORIGINAL_COMMAND="\$2"
464 > SSH_ORIGINAL_COMMAND="\$2"
465 > export SSH_ORIGINAL_COMMAND
465 > export SSH_ORIGINAL_COMMAND
466 > PYTHONPATH="$PYTHONPATH"
466 > PYTHONPATH="$PYTHONPATH"
467 > export PYTHONPATH
467 > export PYTHONPATH
468 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
468 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
469 > EOF
469 > EOF
470
470
471 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
471 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
472 requesting all changes
472 requesting all changes
473 adding changesets
473 adding changesets
474 adding manifests
474 adding manifests
475 adding file changes
475 adding file changes
476 added 6 changesets with 5 changes to 4 files (+1 heads)
476 added 6 changesets with 5 changes to 4 files (+1 heads)
477 new changesets 1160648e36ce:1383141674ec
477 new changesets 1160648e36ce:1383141674ec
478 updating to branch default
478 updating to branch default
479 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
479 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
480
480
481 $ cd read-only-local
481 $ cd read-only-local
482 $ echo "baz" > bar
482 $ echo "baz" > bar
483 $ hg ci -A -m "unpushable commit" bar
483 $ hg ci -A -m "unpushable commit" bar
484 $ hg push --ssh "sh ../ssh.sh"
484 $ hg push --ssh "sh ../ssh.sh"
485 pushing to ssh://user@dummy/*/remote (glob)
485 pushing to ssh://user@dummy/*/remote (glob)
486 searching for changes
486 searching for changes
487 remote: Permission denied
487 remote: Permission denied
488 remote: pretxnopen.hg-ssh hook failed
488 remote: pretxnopen.hg-ssh hook failed
489 abort: push failed on remote
489 abort: push failed on remote
490 [255]
490 [100]
491
491
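
In --read-only mode the refusal comes from a pretxnopen hook, as the "remote: pretxnopen.hg-ssh hook failed" line shows, and the rejected push is now reported on the client with exit status 100. Mercurial fails the operation when an in-process Python hook returns a true value, so a minimal read-only hook can look like the sketch below; the module name and wiring are illustrative (e.g. a [hooks] entry such as pretxnopen.hg-ssh = python:/path/to/readonlyhook.py:rejectwrite).

# readonlyhook.py - minimal sketch of a write-rejecting pretxnopen hook


def rejectwrite(ui, repo, **kwargs):
    ui.warn(b"Permission denied\n")
    # a true return value makes the hook - and the transaction - fail
    return True
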
492 $ cd $TESTTMP
492 $ cd $TESTTMP
493
493
494 stderr from remote commands should be printed before stdout from local code (issue4336)
494 stderr from remote commands should be printed before stdout from local code (issue4336)
495
495
496 $ hg clone remote stderr-ordering
496 $ hg clone remote stderr-ordering
497 updating to branch default
497 updating to branch default
498 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
498 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
499 $ cd stderr-ordering
499 $ cd stderr-ordering
500 $ cat >> localwrite.py << EOF
500 $ cat >> localwrite.py << EOF
501 > from mercurial import exchange, extensions
501 > from mercurial import exchange, extensions
502 >
502 >
503 > def wrappedpush(orig, repo, *args, **kwargs):
503 > def wrappedpush(orig, repo, *args, **kwargs):
504 > res = orig(repo, *args, **kwargs)
504 > res = orig(repo, *args, **kwargs)
505 > repo.ui.write(b'local stdout\n')
505 > repo.ui.write(b'local stdout\n')
506 > repo.ui.flush()
506 > repo.ui.flush()
507 > return res
507 > return res
508 >
508 >
509 > def extsetup(ui):
509 > def extsetup(ui):
510 > extensions.wrapfunction(exchange, b'push', wrappedpush)
510 > extensions.wrapfunction(exchange, b'push', wrappedpush)
511 > EOF
511 > EOF
512
512
513 $ cat >> .hg/hgrc << EOF
513 $ cat >> .hg/hgrc << EOF
514 > [paths]
514 > [paths]
515 > default-push = ssh://user@dummy/remote
515 > default-push = ssh://user@dummy/remote
516 > [ui]
516 > [ui]
517 > ssh = "$PYTHON" "$TESTDIR/dummyssh"
517 > ssh = "$PYTHON" "$TESTDIR/dummyssh"
518 > [extensions]
518 > [extensions]
519 > localwrite = localwrite.py
519 > localwrite = localwrite.py
520 > EOF
520 > EOF
521
521
522 $ echo localwrite > foo
522 $ echo localwrite > foo
523 $ hg commit -m 'testing localwrite'
523 $ hg commit -m 'testing localwrite'
524 $ hg push
524 $ hg push
525 pushing to ssh://user@dummy/remote
525 pushing to ssh://user@dummy/remote
526 searching for changes
526 searching for changes
527 remote: adding changesets
527 remote: adding changesets
528 remote: adding manifests
528 remote: adding manifests
529 remote: adding file changes
529 remote: adding file changes
530 remote: added 1 changesets with 1 changes to 1 files
530 remote: added 1 changesets with 1 changes to 1 files
531 remote: KABOOM
531 remote: KABOOM
532 remote: KABOOM IN PROCESS
532 remote: KABOOM IN PROCESS
533 local stdout
533 local stdout
534
534
535 debug output
535 debug output
536
536
537 $ hg pull --debug ssh://user@dummy/remote --config devel.debug.peer-request=yes
537 $ hg pull --debug ssh://user@dummy/remote --config devel.debug.peer-request=yes
538 pulling from ssh://user@dummy/remote
538 pulling from ssh://user@dummy/remote
539 running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
539 running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
540 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
540 sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
541 devel-peer-request: hello+between
541 devel-peer-request: hello+between
542 devel-peer-request: pairs: 81 bytes
542 devel-peer-request: pairs: 81 bytes
543 sending hello command
543 sending hello command
544 sending between command
544 sending between command
545 remote: 444 (sshv1 no-rust !)
545 remote: 444 (sshv1 no-rust !)
546 remote: 463 (sshv1 rust !)
546 remote: 463 (sshv1 rust !)
547 protocol upgraded to exp-ssh-v2-0003 (sshv2 !)
547 protocol upgraded to exp-ssh-v2-0003 (sshv2 !)
548 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !)
548 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !)
549 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !)
549 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !)
550 remote: 1 (sshv1 !)
550 remote: 1 (sshv1 !)
551 devel-peer-request: protocaps
551 devel-peer-request: protocaps
552 devel-peer-request: caps: * bytes (glob)
552 devel-peer-request: caps: * bytes (glob)
553 sending protocaps command
553 sending protocaps command
554 query 1; heads
554 query 1; heads
555 devel-peer-request: batched-content
555 devel-peer-request: batched-content
556 devel-peer-request: - heads (0 arguments)
556 devel-peer-request: - heads (0 arguments)
557 devel-peer-request: - known (1 arguments)
557 devel-peer-request: - known (1 arguments)
558 devel-peer-request: batch
558 devel-peer-request: batch
559 devel-peer-request: cmds: 141 bytes
559 devel-peer-request: cmds: 141 bytes
560 sending batch command
560 sending batch command
561 searching for changes
561 searching for changes
562 all remote heads known locally
562 all remote heads known locally
563 no changes found
563 no changes found
564 devel-peer-request: getbundle
564 devel-peer-request: getbundle
565 devel-peer-request: bookmarks: 1 bytes
565 devel-peer-request: bookmarks: 1 bytes
566 devel-peer-request: bundlecaps: 270 bytes
566 devel-peer-request: bundlecaps: 270 bytes
567 devel-peer-request: cg: 1 bytes
567 devel-peer-request: cg: 1 bytes
568 devel-peer-request: common: 122 bytes
568 devel-peer-request: common: 122 bytes
569 devel-peer-request: heads: 122 bytes
569 devel-peer-request: heads: 122 bytes
570 devel-peer-request: listkeys: 9 bytes
570 devel-peer-request: listkeys: 9 bytes
571 devel-peer-request: phases: 1 bytes
571 devel-peer-request: phases: 1 bytes
572 sending getbundle command
572 sending getbundle command
573 bundle2-input-bundle: with-transaction
573 bundle2-input-bundle: with-transaction
574 bundle2-input-part: "bookmarks" supported
574 bundle2-input-part: "bookmarks" supported
575 bundle2-input-part: total payload size 26
575 bundle2-input-part: total payload size 26
576 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
576 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
577 bundle2-input-part: total payload size 45
577 bundle2-input-part: total payload size 45
578 bundle2-input-part: "phase-heads" supported
578 bundle2-input-part: "phase-heads" supported
579 bundle2-input-part: total payload size 72
579 bundle2-input-part: total payload size 72
580 bundle2-input-bundle: 3 parts total
580 bundle2-input-bundle: 3 parts total
581 checking for updated bookmarks
581 checking for updated bookmarks
582
582
583 $ cd $TESTTMP
584
585 $ cat dummylog
586 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
587 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
588 Got arguments 1:user@dummy 2:hg -R $TESTTMP/nonexistent serve --stdio
589 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
590 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio (no-reposimplestore !)
591 Got arguments 1:user@dummy 2:hg -R remote serve --stdio (no-reposimplestore !)
592 Got arguments 1:user@dummy 2:hg -R remote serve --stdio (no-reposimplestore !)
593 Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
594 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
595 Got arguments 1:user@dummy 2:hg -R local serve --stdio
596 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
597 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
598 changegroup-in-remote hook: HG_BUNDLE2=1
599 HG_HOOKNAME=changegroup
600 HG_HOOKTYPE=changegroup
601 HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60
602 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60
603 HG_SOURCE=serve
604 HG_TXNID=TXN:$ID$
605 HG_TXNNAME=serve
606 HG_URL=remote:ssh:$LOCALIP
607
608 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
609 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
610 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
611 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
612 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
613 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
614 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
615 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
616 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
617 changegroup-in-remote hook: HG_BUNDLE2=1
618 HG_HOOKNAME=changegroup
619 HG_HOOKTYPE=changegroup
620 HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6
621 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6
622 HG_SOURCE=serve
623 HG_TXNID=TXN:$ID$
624 HG_TXNNAME=serve
625 HG_URL=remote:ssh:$LOCALIP
626
627 Got arguments 1:user@dummy 2:chg -R remote serve --stdio (chg !)
628 changegroup-in-remote hook: HG_BUNDLE2=1 (chg !)
629 HG_HOOKNAME=changegroup (chg !)
630 HG_HOOKTYPE=changegroup (chg !)
631 HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 (chg !)
632 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 (chg !)
633 HG_SOURCE=serve (chg !)
634 HG_TXNID=TXN:$ID$ (chg !)
635 HG_TXNNAME=serve (chg !)
636 HG_URL=remote:ssh:$LOCALIP (chg !)
637 (chg !)
638 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
639 Got arguments 1:user@dummy 2:hg init 'a repo'
640 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
641 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
642 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
643 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
644 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
645 changegroup-in-remote hook: HG_BUNDLE2=1
646 HG_HOOKNAME=changegroup
647 HG_HOOKTYPE=changegroup
648 HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8
649 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8
650 HG_SOURCE=serve
651 HG_TXNID=TXN:$ID$
652 HG_TXNNAME=serve
653 HG_URL=remote:ssh:$LOCALIP
654
655 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
656
657
658 remote hook failure is attributed to remote
659
660 $ cat > $TESTTMP/failhook << EOF
661 > def hook(ui, repo, **kwargs):
662 > ui.write(b'hook failure!\n')
663 > ui.flush()
664 > return 1
665 > EOF
666
667 $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
668
669 $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
670 $ cd hookout
671 $ touch hookfailure
672 $ hg -q commit -A -m 'remote hook failure'
673 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
674 pushing to ssh://user@dummy/remote
675 searching for changes
676 remote: adding changesets
677 remote: adding manifests
678 remote: adding file changes
679 remote: hook failure!
680 remote: transaction abort!
681 remote: rollback completed
682 remote: pretxnchangegroup.fail hook failed
683 abort: push failed on remote
684 [255]
684 [100]
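The [255] -> [100] pair above is the visible effect of this changeset on the push path: when the remote's pretxnchangegroup hook rejects the push, the failure is attributed to the remote via error.RemoteError rather than a plain error.Abort, and, as the [100] in the expected output shows, the test environment runs with detailed exit codes enabled, where remote errors exit with status 100. The following is a minimal sketch of that shape of change, not the exact hunk from this changeset; the helper name _report_push_failure is invented for this note and does not come from exchange.py.

# Illustrative sketch only. It mirrors the behaviour the commit message
# describes: a remote-side push failure in mercurial/exchange.py is raised
# as error.RemoteError instead of a generic error.Abort, so callers and the
# detailed-exit-code machinery can tell "the remote rejected this" apart
# from a local failure.
from mercurial import error
from mercurial.i18n import _


def _report_push_failure(pushop, output):
    """Relay remote output, then abort with a remote-attributed error."""
    if output:
        # whatever the remote printed: hook output, transaction rollback, ...
        pushop.ui.warn(output)
    # old: raise error.Abort(_(b'push failed on remote'))   -> exit [255]
    # new: raise error.RemoteError(...)                     -> exit [100]
    raise error.RemoteError(_(b'push failed on remote'))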
685
686 abort during pull is properly reported as such
687
688 $ echo morefoo >> ../remote/foo
689 $ hg -R ../remote commit --message "more foo to be pulled"
690 $ cat >> ../remote/.hg/hgrc << EOF
691 > [extensions]
692 > crash = ${TESTDIR}/crashgetbundler.py
693 > EOF
694 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
695 pulling from ssh://user@dummy/remote
696 searching for changes
697 remote: abort: this is an exercise
698 abort: pull failed on remote
699 [255]
699 [100]
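The pull side gets the same treatment: "abort: pull failed on remote" now surfaces as a RemoteError, so this run also drops from [255] to [100]. The toy model below shows how an Abort hierarchy plus a per-class detailed exit code produces exactly that difference; the class layout and the detailed_exit_code attribute mirror Mercurial's scheme as best I can tell, but treat the names as assumptions rather than quotes from error.py or dispatch.py.

# Toy model of exit-status selection, not Mercurial's real implementation.
class Abort(Exception):
    """Generic command failure; traditionally exits with 255."""

    detailed_exit_code = None


class RemoteError(Abort):
    """Failure reported by, or attributed to, the remote repository."""

    detailed_exit_code = 100  # assumption: hg's detailed code for remote errors


def exit_status(exc, detailed_exit_codes_enabled):
    """Pick the process exit status for an aborting exception."""
    if detailed_exit_codes_enabled and exc.detailed_exit_code is not None:
        return exc.detailed_exit_code
    return 255


# The test suite runs with detailed exit codes on, hence [100] above; a plain
# Abort, or a run without detailed exit codes, still produces 255.
assert exit_status(RemoteError('pull failed on remote'), True) == 100
assert exit_status(RemoteError('pull failed on remote'), False) == 255
assert exit_status(Abort('some local problem'), True) == 255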
700
701 abort with no error hint when there is a ssh problem when pulling
702
703 $ hg pull ssh://brokenrepository -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
704 pulling from ssh://brokenrepository/
705 abort: no suitable response from remote hg
706 [255]
707
708 abort with configured error hint when there is a ssh problem when pulling
709
710 $ hg pull ssh://brokenrepository -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" \
711 > --config ui.ssherrorhint="Please see http://company/internalwiki/ssh.html"
712 pulling from ssh://brokenrepository/
713 abort: no suitable response from remote hg
714 (Please see http://company/internalwiki/ssh.html)
715 [255]
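Both ssh-handshake failures above still exit with [255] in this diff: "no suitable response from remote hg" is a local transport problem rather than an error relayed by a working remote, and it is raised outside exchange.py, the file this changeset's message targets. The snippet below only sketches how the configurable ui.ssherrorhint ends up in parentheses under the abort message; badresponse is a hypothetical stand-in, not the actual function in Mercurial's ssh peer code.

# Hypothetical sketch; the real logic lives in Mercurial's ssh handshake
# code, which this changeset leaves alone.
from mercurial import error
from mercurial.i18n import _


def badresponse(ui):
    """Abort the handshake, attaching the configured hint if any."""
    msg = _(b'no suitable response from remote hg')
    hint = ui.config(b'ui', b'ssherrorhint')  # None unless configured
    # The test output above shows this path still exits with 255 here.
    raise error.RepoError(msg, hint=hint)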
716
717 test that custom environment is passed down to ssh executable
718 $ cat >>dumpenv <<EOF
719 > #! /bin/sh
720 > echo \$VAR >&2
721 > EOF
722 $ chmod +x dumpenv
723 $ hg pull ssh://something --config ui.ssh="sh dumpenv"
724 pulling from ssh://something/
725 remote:
726 abort: no suitable response from remote hg
727 [255]
728 $ hg pull ssh://something --config ui.ssh="sh dumpenv" --config sshenv.VAR=17
729 pulling from ssh://something/
730 remote: 17
731 abort: no suitable response from remote hg
732 [255]
733
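The last test covers the [sshenv] config section: keys set there (here via --config sshenv.VAR=17) are exported into the environment of the spawned ui.ssh command, which is why dumpenv prints 17 on the second run. Below is a rough model of that mechanism, assuming nothing beyond what the test shows; run_ssh and its arguments are illustrative placeholders, not Mercurial's actual plumbing.

# Rough model of the [sshenv] mechanism the test exercises: config keys under
# sshenv are layered onto the environment of the spawned ui.ssh command.
import os
import subprocess


def run_ssh(ssh_cmd, args, sshenv=None):
    """Run the configured ssh command with sshenv overrides applied."""
    env = dict(os.environ)
    env.update(sshenv or {})  # e.g. {'VAR': '17'} from --config sshenv.VAR=17
    return subprocess.call(ssh_cmd.split() + list(args), env=env)


# Roughly what the second pull above arranges before handing control to
# "sh dumpenv", which then echoes $VAR (17) back as remote output:
# run_ssh('sh dumpenv', ['something', 'hg -R something serve --stdio'],
#         sshenv={'VAR': '17'})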