##// END OF EJS Templates
bundle2: keep hint close to the primary message when remote abort...
Pierre-Yves David -
r30908:4c8dcb49 stable
parent child Browse files
Show More
@@ -1,2006 +1,2008
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 base85,
19 base85,
20 bookmarks as bookmod,
20 bookmarks as bookmod,
21 bundle2,
21 bundle2,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 lock as lockmod,
25 lock as lockmod,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pushkey,
28 pushkey,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 tags,
32 tags,
33 url as urlmod,
33 url as urlmod,
34 util,
34 util,
35 )
35 )
36
36
37 urlerr = util.urlerr
37 urlerr = util.urlerr
38 urlreq = util.urlreq
38 urlreq = util.urlreq
39
39
40 # Maps bundle version human names to changegroup versions.
40 # Maps bundle version human names to changegroup versions.
41 _bundlespeccgversions = {'v1': '01',
41 _bundlespeccgversions = {'v1': '01',
42 'v2': '02',
42 'v2': '02',
43 'packed1': 's1',
43 'packed1': 's1',
44 'bundle2': '02', #legacy
44 'bundle2': '02', #legacy
45 }
45 }
46
46
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into (version, {key: value}).
        # Keys and values are URI decoded; a parameter without "=" is a
        # syntax error.
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-qualified spec: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            # Only a compression given: pick the bundle version from the
            # repo's storage format.
            compression = spec
            version = 'v1'
            if 'generaldelta' in repo.requirements:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            # Only a version given: pick a default compression for it.
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                    _('missing support for repository features: %s') %
                      ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate the human-readable names into their internal
        # representation (engine bundle type and changegroup version).
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
158
158
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle read from ``fh``.

    The first four bytes select the format: bundle1 ("HG10"), bundle2
    ("HG2" + subversion) or a stream clone bundle ("HGS1").  A headerless
    changegroup (leading NUL byte) on an unnamed stream is fixed up and
    treated as an uncompressed bundle1.  Raises ``error.Abort`` for
    anything unrecognized.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if fname:
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = "stream"
        if header.startswith('\0') and not header.startswith('HG'):
            # Raw changegroup without a bundle header: re-wrap it so it
            # parses as an uncompressed HG10 bundle.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # bundle1 carries a two-byte compression tag after the magic.
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
186
186
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Returns a bundlespec string such as ``bzip2-v1``, ``none-v2`` or
    ``none-packed1;<params>``.  Raises ``error.Abort`` when the
    compression, changegroup version or bundle type can't be identified.
    """
    def speccompression(alg):
        # Map an internal bundle compression tag to its human-readable
        # bundlespec name, or None when the tag is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # Internal marker for a BZ stream whose header was consumed;
            # report it as plain BZ.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Walk the parts to find the changegroup version carried inside.
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        # Stream clone bundles advertise their repository requirements;
        # encode them as a URI-quoted parameter string.
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
239
239
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker
    format.
    """
    if not markers:
        return None
    # Negotiate an obsmarker format both sides understand.
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler does not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('obsmarkers', data=stream)
254
254
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        # Nothing known in common: everything since the null revision.
        common = [nullid]
    else:
        # Drop common nodes unknown to the local changelog.
        knows = cl.hasnode
        common = [node for node in common if knows(node)]
    heads = heads or cl.heads()
    return discovery.outgoing(repo, common, heads)
273
273
274 def _forcebundle1(op):
274 def _forcebundle1(op):
275 """return true if a pull/push must use bundle1
275 """return true if a pull/push must use bundle1
276
276
277 This function is used to allow testing of the older bundle version"""
277 This function is used to allow testing of the older bundle version"""
278 ui = op.repo.ui
278 ui = op.repo.ui
279 forcebundle1 = False
279 forcebundle1 = False
280 # The goal is this config is to allow developer to choose the bundle
280 # The goal is this config is to allow developer to choose the bundle
281 # version used during exchanged. This is especially handy during test.
281 # version used during exchanged. This is especially handy during test.
282 # Value is a list of bundle version to be picked from, highest version
282 # Value is a list of bundle version to be picked from, highest version
283 # should be used.
283 # should be used.
284 #
284 #
285 # developer config: devel.legacy.exchange
285 # developer config: devel.legacy.exchange
286 exchange = ui.configlist('devel', 'legacy.exchange')
286 exchange = ui.configlist('devel', 'legacy.exchange')
287 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
287 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
288 return forcebundle1 or not op.remote.capable('bundle2')
288 return forcebundle1 or not op.remote.capable('bundle2')
289
289
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    # action -> (success message, failure message); both take the bookmark
    # name as their single % argument.
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
401
401
402
402
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote.

    Returns the ``pushoperation`` object.  Its ``cgresult`` attribute
    carries the integer changegroup result:
    - None means nothing to push
    - 0 means HTTP error
    - 1 means we pushed and remote head count is unchanged *or*
      we have outgoing changesets but refused to push
    - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **opargs)
    if pushop.remote.local():
        # Pushing to a local repo: make sure the destination understands
        # all of our repository requirements before doing any work.
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        if pushop.locallocked:
            # A transaction is only needed when we may write local data
            # (e.g. a bundle2 reply), which requires the local lock.
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # Old-style push: we must lock the remote repository ourselves.
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if not _forcebundle1(pushop):
                _pushbundle2(pushop)
            # The following steps check pushop.stepsdone and are no-ops
            # for whatever the bundle2 push already handled.
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
            if pushop.trmanager:
                pushop.trmanager.close()
    finally:
        # Release in reverse order of acquisition.
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop
487
487
# list of steps to perform discovery before push, in execution order
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
495
495
496 def pushdiscovery(stepname):
496 def pushdiscovery(stepname):
497 """decorator for function performing discovery before push
497 """decorator for function performing discovery before push
498
498
499 The function is added to the step -> function mapping and appended to the
499 The function is added to the step -> function mapping and appended to the
500 list of steps. Beware that decorated function will be added in order (this
500 list of steps. Beware that decorated function will be added in order (this
501 may matter).
501 may matter).
502
502
503 You can only use this decorator for a new step, if you want to wrap a step
503 You can only use this decorator for a new step, if you want to wrap a step
504 from an extension, change the pushdiscovery dictionary directly."""
504 from an extension, change the pushdiscovery dictionary directly."""
505 def dec(func):
505 def dec(func):
506 assert stepname not in pushdiscoverymapping
506 assert stepname not in pushdiscoverymapping
507 pushdiscoverymapping[stepname] = func
507 pushdiscoverymapping[stepname] = func
508 pushdiscoveryorder.append(stepname)
508 pushdiscoveryorder.append(stepname)
509 return func
509 return func
510 return dec
510 return dec
511
511
512 def _pushdiscovery(pushop):
512 def _pushdiscovery(pushop):
513 """Run all discovery steps"""
513 """Run all discovery steps"""
514 for stepname in pushdiscoveryorder:
514 for stepname in pushdiscoveryorder:
515 step = pushdiscoverymapping[stepname]
515 step = pushdiscoverymapping[stepname]
516 step(pushop)
516 step(pushop)
517
517
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    # reuse the incoming result to avoid a second round of discovery
    pushop.outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
530
530
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Fills ``pushop.outdatedphases`` (heads to turn public if the
    changeset push succeeds) and ``pushop.fallbackoutdatedphases``
    (heads to turn public even if it fails).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    # on a non-publishing server only already-public changesets may be
    # turned public; hence the extra revset condition below
    extracond = ''
    if not publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
579
579
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """compute the obsolescence markers relevant to the push"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
590
590
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmark updates to send to the remote

    Compares local and remote bookmarks and fills
    ``pushop.outbookmarks`` with ``(name, old-remote-hex, new-hex)``
    triples. Sets ``pushop.bkresult`` to 2 when an explicitly requested
    bookmark exists on neither side.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # only bookmarks on pushed revisions (and their ancestors) move
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # comparison results use None for "bookmark absent on that side"
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    # NOTE: these are generators, each consumed exactly once below
    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
655
655
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before pushing

    Returns False (after issuing the standard "no changes found"
    message) when there is nothing to push. Otherwise, unless the push
    is forced, aborts if any outgoing head is obsolete or troubled,
    runs discovery.checkheads and returns True.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mst = {"unstable": _("push includes unstable changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "divergent": _("push includes divergent changeset: %s!")}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
686
686
# Ordered list of part-generator step names for an outgoing bundle2;
# order matters.
b2partsgenorder = []

# step name -> part generating function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generation step

    The decorated function is recorded in the name -> function mapping
    and, unless an explicit index is given, appended to the ordered step
    list. Registration order is therefore generation order (this may
    matter).

    Only use this decorator for new steps; to wrap a step defined by an
    extension, attack the b2partsgenmapping dictionary directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
713
713
714 def _pushb2ctxcheckheads(pushop, bundler):
714 def _pushb2ctxcheckheads(pushop, bundler):
715 """Generate race condition checking parts
715 """Generate race condition checking parts
716
716
717 Exists as an independent function to aid extensions
717 Exists as an independent function to aid extensions
718 """
718 """
719 if not pushop.force:
719 if not pushop.force:
720 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
720 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
721
721
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    # default to changegroup version '01' when the remote advertises none
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            # NOTE(review): raises ValueError rather than error.Abort like
            # the rest of this module — callers may rely on it; confirm
            # before changing.
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
762
762
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Emits one pushkey part per head whose phase must be turned public on
    the remote, and returns a reply handler that reports per-head
    success or failure.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not 'pushkey' in b2caps:
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, used to map replies/failures back to heads
    part2node = []

    def handlefailure(pushop, exc):
        # called through pushop.pkfailcb when the remote rejects a part
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # warn for every head the server ignored or failed to publish
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
803
803
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the bundle when needed"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no obsmarkers format shared with the remote: skip the step
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
815
815
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Emits one pushkey part per entry in ``pushop.outbookmarks`` and
    returns a reply handler that reports per-bookmark outcome.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples for reply/failure mapping
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # called through pushop.pkfailcb when the remote rejects a part
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty "old" means creation, empty "new" means deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
867
867
868
868
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            # part generators may hand back a callback to process the reply
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # print the remote abort message and its hint together, as
            # "remote:" lines, before raising the local abort
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
915
917
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path: builds a changegroup and sends it with
    either the ``unbundle`` wire command or ``addchangegroup``. Stores
    the remote's result in ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
962
964
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Reads the remote's 'phases' pushkey namespace, applies the remote view
    to the local repo, then pushes any locally-public heads the remote
    still considers draft (unless bundle2 already handled phases).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # remote publishes everything it has: common heads are public
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1018
1020
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo

    When no transaction manager is active the repo is not locked, so no
    phase is actually changed; the user is informed instead when a move
    would have happened.
    """
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
1035
1037
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Markers are escaped into pushkey-safe chunks and pushed under the
    'obsolete' namespace; a warning is emitted if any chunk is refused.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
1054
1056
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Skipped when the changegroup push failed (cgresult == 0) or when
    bundle2 already handled bookmarks. Each outgoing bookmark is pushed
    through pushkey and reported via bookmsgmap (update/export/delete).
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            # no previous value on the remote: this is a new bookmark
            action = 'export'
        elif not new:
            # no new value: the bookmark is being removed
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1076
1078
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # True unless configuration or remote capabilities force bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # parsed bundle2 capabilities advertised by the remote
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1147
1149
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily-created transaction; None until transaction() is called
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            # password is stripped from the url recorded in the journal name
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1177
1179
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        # local-to-local pull: refuse early if the source needs repo
        # features this repository does not support
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each step checks pullop.stepsdone and is a no-op if bundle2
        # already handled it
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1233
1235
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1241
1243
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def dec(func):
        # registering the same step name twice is a programming error
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec
1257
1259
def _pulldiscovery(pullop):
    """Run all discovery steps

    Steps run in registration order (pulldiscoveryorder), looked up through
    pulldiscoverymapping so extensions can wrap individual steps.
    """
    for stepname in pulldiscoveryorder:
        step = pulldiscoverymapping[stepname]
        step(pullop)
1263
1265
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmark data already provided by the caller
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1277
1279
1278
1280
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads in filtered locally, lets drop it from the unknown
        # remote heads and put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # head is known locally (possibly hidden): treat as common
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            # every remote head was known locally: nothing left to fetch
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1316
1318
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    streaming, streamreqs = streamclone.canperformstreamclone(pullop)

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        kwargs['listkeys'] = ['phases']
        if pullop.remotebookmarks is None:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            kwargs['listkeys'].append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        results = [cg['return'] for cg in op.records['changegroup']]
        pullop.cgresult = changegroup.combineresults(results)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1384
1386
1385 def _pullbundle2extraprepare(pullop, kwargs):
1387 def _pullbundle2extraprepare(pullop, kwargs):
1386 """hook function so that extensions can extend the getbundle call"""
1388 """hook function so that extensions can extend the getbundle call"""
1387 pass
1389 pass
1388
1390
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # The transaction is opened as late as possible so that we do not open
    # one for nothing and do not break future useful rollback calls.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')

    ui = pullop.repo.ui
    remote = pullop.remote

    if not pullop.fetch:
        ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return

    pullop.gettransaction()

    if pullop.heads is None:
        if list(pullop.common) == [nullid]:
            ui.status(_("requesting all changes\n"))
        elif remote.capable('changegroupsubset'):
            # issue1320, avoid a race if remote changed after discovery
            pullop.heads = pullop.rheads

    # Pick the richest protocol the remote understands to fetch the data.
    if remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        bundle = remote.getbundle('pull', common=pullop.common,
                                  heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        bundle = remote.changegroup(pullop.fetch, 'pull')
    elif not remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        bundle = remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = bundle.apply(pullop.repo, 'pull', remote.url())
1421
1423
def _pullphase(pullop):
    """fetch phase information advertised by the remote and apply it locally"""
    if 'phases' in pullop.stepsdone:
        return
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1428
1430
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        publicheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                      pullop.pulledsubset,
                                                      remotephases)
        draftheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        publicheads = pullop.pulledsubset
        draftheads = []

    unfi = pullop.repo.unfiltered()
    phaseof = unfi._phasecache.phase
    torev = unfi.changelog.nodemap.get

    # exclude changesets already public locally and update the others
    publicheads = [n for n in publicheads
                   if phaseof(unfi, torev(n)) > phases.public]
    if publicheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, phases.public, publicheads)

    # exclude changesets already draft locally and update the others
    draftheads = [n for n in draftheads
                  if phaseof(unfi, torev(n)) > phases.draft]
    if draftheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, phases.draft, draftheads)
1463
1465
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    # wire data is hex; convert to binary nodes before updating
    remotemarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, remotemarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1476
1478
def _pullobsolete(pullop):
    """fetch obsolescence markers from the remote, when the exchange is enabled

    Returns the pull transaction when one had to be opened to store new
    markers (None otherwise), so callers know whether a transaction was
    created.

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # keys are dump0, dump1, ...; decode each base85 payload
            for key in sorted(remoteobs, reverse=True):
                if not key.startswith('dump'):
                    continue
                data = base85.b85decode(remoteobs[key])
                _version, decoded = obsolete._readmarkers(data)
                markers.extend(decoded)
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
1504
1506
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return set(['HG20', 'bundle2=' + urlreq.quote(capsblob)])
1511
1513
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1519
1521
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    its step name appended to the ordered step list (or inserted at ``idx``
    when given). Beware that decoration order therefore matters.

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func
    return register
1538
1540
def bundle2requested(bundlecaps):
    """report whether the client capability set asks for a bundle2 stream"""
    if bundlecaps is None:
        return False
    for cap in bundlecaps:
        if cap.startswith('HG2'):
            return True
    return False
1543
1545
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        # bundle10 carries nothing but a changegroup, so any other request
        # argument cannot be honored and must be rejected loudly.
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        bundler = changegroup.getbundler('01', repo, bundlecaps)
        return changegroup.getsubsetraw(repo, outgoing, bundler, source)

    # bundle20 case
    # decode the bundle2 capabilities the client advertised inside bundlecaps
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    # part generators receive the full request through **kwargs
    kwargs['heads'] = heads
    kwargs['common'] = common

    # run every registered part generator, in registration order (extensions
    # may have added entries via getbundle2partsgenerator)
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **kwargs)

    return bundler.getchunks()
1583
1585
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True):
        return

    # negotiate the changegroup version to use
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                            bundlecaps=bundlecaps,
                                            version=version)
    if not cg:
        return

    part = bundler.newpart('changegroup', data=cg)
    if cgversions:
        part.addparam('version', version)
    part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
    if 'treemanifest' in repo.requirements:
        part.addparam('treemanifest', '1')
1611
1613
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1622
1624
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only ship markers relevant to the ancestors of the requested heads
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    buildobsmarkerspart(bundler, markers)
1634
1636
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless:
    # - changeset are being exchanged,
    # - the client supports it.
    if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
        return

    outgoing = _computeoutgoing(repo, heads, common)

    # nothing to send if the client already has everything
    if not outgoing.missingheads:
        return

    cache = tags.hgtagsfnodescache(repo.unfiltered())
    chunks = []

    # .hgtags fnodes are only relevant for head changesets. While we could
    # transfer values for all known nodes, there will likely be little to
    # no benefit.
    #
    # We don't bother using a generator to produce output data because
    # a) we only have 40 bytes per head and even esoteric numbers of heads
    # consume little memory (1M heads is 40MB) b) we don't want to send the
    # part if we don't have entries and knowing if we have entries requires
    # cache lookups.
    for node in outgoing.missingheads:
        # Don't compute missing, as this may slow down serving.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is not None:
            chunks.extend([node, fnode])

    # only emit the part when at least one (node, fnode) pair was found
    if chunks:
        bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1677
1679
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    This function is primarily used to generate `bookmarks` bundle2 part.
    It is a separate function in order to make it easy to wrap it
    in extensions. Passing `kwargs` to the function makes it easy to
    add new parameters in extensions.
    """
    return {book: node for book, node in bookmod.listbinbookmarks(repo)}
1688
1690
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    current = repo.heads()
    digest = hashlib.sha1(''.join(sorted(current))).digest()
    accepted = (their_heads == ['force']
                or their_heads == current
                or their_heads == ['hashed', digest])
    if not accepted:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1702
1704
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
                                       False)
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # raises PushRaced when the repository changed during transfer
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if util.safehasattr(cg, 'params'):
            # bundle2 stream (only bundle2 objects carry a `params` attribute)
            r = None
            try:
                def gettransaction():
                    # lazily open wlock/lock/transaction on first use; state
                    # lives in lockandtr so the enclosing finally can release
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    # capture the reply bundle (even on failure) so any output
                    # produced from here on is forwarded to the client
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # tag the exception so upper layers know it happened during
                # bundle2 processing, and salvage already-generated output
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
        else:
            # legacy bundle10 path: plain changegroup application
            lockandtr[1] = repo.lock()
            r = cg.apply(repo, source, url)
    finally:
        # release in reverse acquisition order: tr, lock, wlock
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1771
1773
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""
    repo = pullop.repo
    remote = pullop.remote

    # feature can be disabled by configuration
    if not repo.ui.configbool('ui', 'clonebundles', True):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # a partial clone cannot be seeded from a full bundle
    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(repo, entries)
    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
        return

    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    if repo.ui.configbool('ui', 'clonebundlefallback', False):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
1835
1837
1836 def parseclonebundlesmanifest(repo, s):
1838 def parseclonebundlesmanifest(repo, s):
1837 """Parses the raw text of a clone bundles manifest.
1839 """Parses the raw text of a clone bundles manifest.
1838
1840
1839 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1841 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1840 to the URL and other keys are the attributes for the entry.
1842 to the URL and other keys are the attributes for the entry.
1841 """
1843 """
1842 m = []
1844 m = []
1843 for line in s.splitlines():
1845 for line in s.splitlines():
1844 fields = line.split()
1846 fields = line.split()
1845 if not fields:
1847 if not fields:
1846 continue
1848 continue
1847 attrs = {'URL': fields[0]}
1849 attrs = {'URL': fields[0]}
1848 for rawattr in fields[1:]:
1850 for rawattr in fields[1:]:
1849 key, value = rawattr.split('=', 1)
1851 key, value = rawattr.split('=', 1)
1850 key = urlreq.unquote(key)
1852 key = urlreq.unquote(key)
1851 value = urlreq.unquote(value)
1853 value = urlreq.unquote(value)
1852 attrs[key] = value
1854 attrs[key] = value
1853
1855
1854 # Parse BUNDLESPEC into components. This makes client-side
1856 # Parse BUNDLESPEC into components. This makes client-side
1855 # preferences easier to specify since you can prefer a single
1857 # preferences easier to specify since you can prefer a single
1856 # component of the BUNDLESPEC.
1858 # component of the BUNDLESPEC.
1857 if key == 'BUNDLESPEC':
1859 if key == 'BUNDLESPEC':
1858 try:
1860 try:
1859 comp, version, params = parsebundlespec(repo, value,
1861 comp, version, params = parsebundlespec(repo, value,
1860 externalnames=True)
1862 externalnames=True)
1861 attrs['COMPRESSION'] = comp
1863 attrs['COMPRESSION'] = comp
1862 attrs['VERSION'] = version
1864 attrs['VERSION'] = version
1863 except error.InvalidBundleSpecification:
1865 except error.InvalidBundleSpecification:
1864 pass
1866 pass
1865 except error.UnsupportedBundleSpecification:
1867 except error.UnsupportedBundleSpecification:
1866 pass
1868 pass
1867
1869
1868 m.append(attrs)
1870 m.append(attrs)
1869
1871
1870 return m
1872 return m
1871
1873
1872 def filterclonebundleentries(repo, entries):
1874 def filterclonebundleentries(repo, entries):
1873 """Remove incompatible clone bundle manifest entries.
1875 """Remove incompatible clone bundle manifest entries.
1874
1876
1875 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1877 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1876 and returns a new list consisting of only the entries that this client
1878 and returns a new list consisting of only the entries that this client
1877 should be able to apply.
1879 should be able to apply.
1878
1880
1879 There is no guarantee we'll be able to apply all returned entries because
1881 There is no guarantee we'll be able to apply all returned entries because
1880 the metadata we use to filter on may be missing or wrong.
1882 the metadata we use to filter on may be missing or wrong.
1881 """
1883 """
1882 newentries = []
1884 newentries = []
1883 for entry in entries:
1885 for entry in entries:
1884 spec = entry.get('BUNDLESPEC')
1886 spec = entry.get('BUNDLESPEC')
1885 if spec:
1887 if spec:
1886 try:
1888 try:
1887 parsebundlespec(repo, spec, strict=True)
1889 parsebundlespec(repo, spec, strict=True)
1888 except error.InvalidBundleSpecification as e:
1890 except error.InvalidBundleSpecification as e:
1889 repo.ui.debug(str(e) + '\n')
1891 repo.ui.debug(str(e) + '\n')
1890 continue
1892 continue
1891 except error.UnsupportedBundleSpecification as e:
1893 except error.UnsupportedBundleSpecification as e:
1892 repo.ui.debug('filtering %s because unsupported bundle '
1894 repo.ui.debug('filtering %s because unsupported bundle '
1893 'spec: %s\n' % (entry['URL'], str(e)))
1895 'spec: %s\n' % (entry['URL'], str(e)))
1894 continue
1896 continue
1895
1897
1896 if 'REQUIRESNI' in entry and not sslutil.hassni:
1898 if 'REQUIRESNI' in entry and not sslutil.hassni:
1897 repo.ui.debug('filtering %s because SNI not supported\n' %
1899 repo.ui.debug('filtering %s because SNI not supported\n' %
1898 entry['URL'])
1900 entry['URL'])
1899 continue
1901 continue
1900
1902
1901 newentries.append(entry)
1903 newentries.append(entry)
1902
1904
1903 return newentries
1905 return newentries
1904
1906
1905 class clonebundleentry(object):
1907 class clonebundleentry(object):
1906 """Represents an item in a clone bundles manifest.
1908 """Represents an item in a clone bundles manifest.
1907
1909
1908 This rich class is needed to support sorting since sorted() in Python 3
1910 This rich class is needed to support sorting since sorted() in Python 3
1909 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1911 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1910 won't work.
1912 won't work.
1911 """
1913 """
1912
1914
1913 def __init__(self, value, prefers):
1915 def __init__(self, value, prefers):
1914 self.value = value
1916 self.value = value
1915 self.prefers = prefers
1917 self.prefers = prefers
1916
1918
1917 def _cmp(self, other):
1919 def _cmp(self, other):
1918 for prefkey, prefvalue in self.prefers:
1920 for prefkey, prefvalue in self.prefers:
1919 avalue = self.value.get(prefkey)
1921 avalue = self.value.get(prefkey)
1920 bvalue = other.value.get(prefkey)
1922 bvalue = other.value.get(prefkey)
1921
1923
1922 # Special case for b missing attribute and a matches exactly.
1924 # Special case for b missing attribute and a matches exactly.
1923 if avalue is not None and bvalue is None and avalue == prefvalue:
1925 if avalue is not None and bvalue is None and avalue == prefvalue:
1924 return -1
1926 return -1
1925
1927
1926 # Special case for a missing attribute and b matches exactly.
1928 # Special case for a missing attribute and b matches exactly.
1927 if bvalue is not None and avalue is None and bvalue == prefvalue:
1929 if bvalue is not None and avalue is None and bvalue == prefvalue:
1928 return 1
1930 return 1
1929
1931
1930 # We can't compare unless attribute present on both.
1932 # We can't compare unless attribute present on both.
1931 if avalue is None or bvalue is None:
1933 if avalue is None or bvalue is None:
1932 continue
1934 continue
1933
1935
1934 # Same values should fall back to next attribute.
1936 # Same values should fall back to next attribute.
1935 if avalue == bvalue:
1937 if avalue == bvalue:
1936 continue
1938 continue
1937
1939
1938 # Exact matches come first.
1940 # Exact matches come first.
1939 if avalue == prefvalue:
1941 if avalue == prefvalue:
1940 return -1
1942 return -1
1941 if bvalue == prefvalue:
1943 if bvalue == prefvalue:
1942 return 1
1944 return 1
1943
1945
1944 # Fall back to next attribute.
1946 # Fall back to next attribute.
1945 continue
1947 continue
1946
1948
1947 # If we got here we couldn't sort by attributes and prefers. Fall
1949 # If we got here we couldn't sort by attributes and prefers. Fall
1948 # back to index order.
1950 # back to index order.
1949 return 0
1951 return 0
1950
1952
1951 def __lt__(self, other):
1953 def __lt__(self, other):
1952 return self._cmp(other) < 0
1954 return self._cmp(other) < 0
1953
1955
1954 def __gt__(self, other):
1956 def __gt__(self, other):
1955 return self._cmp(other) > 0
1957 return self._cmp(other) > 0
1956
1958
1957 def __eq__(self, other):
1959 def __eq__(self, other):
1958 return self._cmp(other) == 0
1960 return self._cmp(other) == 0
1959
1961
1960 def __le__(self, other):
1962 def __le__(self, other):
1961 return self._cmp(other) <= 0
1963 return self._cmp(other) <= 0
1962
1964
1963 def __ge__(self, other):
1965 def __ge__(self, other):
1964 return self._cmp(other) >= 0
1966 return self._cmp(other) >= 0
1965
1967
1966 def __ne__(self, other):
1968 def __ne__(self, other):
1967 return self._cmp(other) != 0
1969 return self._cmp(other) != 0
1968
1970
1969 def sortclonebundleentries(ui, entries):
1971 def sortclonebundleentries(ui, entries):
1970 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1972 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1971 if not prefers:
1973 if not prefers:
1972 return list(entries)
1974 return list(entries)
1973
1975
1974 prefers = [p.split('=', 1) for p in prefers]
1976 prefers = [p.split('=', 1) for p in prefers]
1975
1977
1976 items = sorted(clonebundleentry(v, prefers) for v in entries)
1978 items = sorted(clonebundleentry(v, prefers) for v in entries)
1977 return [i.value for i in items]
1979 return [i.value for i in items]
1978
1980
1979 def trypullbundlefromurl(ui, repo, url):
1981 def trypullbundlefromurl(ui, repo, url):
1980 """Attempt to apply a bundle from a URL."""
1982 """Attempt to apply a bundle from a URL."""
1981 lock = repo.lock()
1983 lock = repo.lock()
1982 try:
1984 try:
1983 tr = repo.transaction('bundleurl')
1985 tr = repo.transaction('bundleurl')
1984 try:
1986 try:
1985 try:
1987 try:
1986 fh = urlmod.open(ui, url)
1988 fh = urlmod.open(ui, url)
1987 cg = readbundle(ui, fh, 'stream')
1989 cg = readbundle(ui, fh, 'stream')
1988
1990
1989 if isinstance(cg, bundle2.unbundle20):
1991 if isinstance(cg, bundle2.unbundle20):
1990 bundle2.processbundle(repo, cg, lambda: tr)
1992 bundle2.processbundle(repo, cg, lambda: tr)
1991 elif isinstance(cg, streamclone.streamcloneapplier):
1993 elif isinstance(cg, streamclone.streamcloneapplier):
1992 cg.apply(repo)
1994 cg.apply(repo)
1993 else:
1995 else:
1994 cg.apply(repo, 'clonebundles', url)
1996 cg.apply(repo, 'clonebundles', url)
1995 tr.close()
1997 tr.close()
1996 return True
1998 return True
1997 except urlerr.httperror as e:
1999 except urlerr.httperror as e:
1998 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2000 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1999 except urlerr.urlerror as e:
2001 except urlerr.urlerror as e:
2000 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
2002 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
2001
2003
2002 return False
2004 return False
2003 finally:
2005 finally:
2004 tr.release()
2006 tr.release()
2005 finally:
2007 finally:
2006 lock.release()
2008 lock.release()
@@ -1,1116 +1,1116
1 Test exchange of common information using bundle2
1 Test exchange of common information using bundle2
2
2
3
3
4 $ getmainid() {
4 $ getmainid() {
5 > hg -R main log --template '{node}\n' --rev "$1"
5 > hg -R main log --template '{node}\n' --rev "$1"
6 > }
6 > }
7
7
8 enable obsolescence
8 enable obsolescence
9
9
10 $ cp $HGRCPATH $TESTTMP/hgrc.orig
10 $ cp $HGRCPATH $TESTTMP/hgrc.orig
11 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
11 $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
12 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
12 > echo pushkey: lock state after \"\$HG_NAMESPACE\"
13 > hg debuglock
13 > hg debuglock
14 > EOF
14 > EOF
15
15
16 $ cat >> $HGRCPATH << EOF
16 $ cat >> $HGRCPATH << EOF
17 > [experimental]
17 > [experimental]
18 > evolution=createmarkers,exchange
18 > evolution=createmarkers,exchange
19 > bundle2-output-capture=True
19 > bundle2-output-capture=True
20 > [ui]
20 > [ui]
21 > ssh=python "$TESTDIR/dummyssh"
21 > ssh=python "$TESTDIR/dummyssh"
22 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
22 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
23 > [web]
23 > [web]
24 > push_ssl = false
24 > push_ssl = false
25 > allow_push = *
25 > allow_push = *
26 > [phases]
26 > [phases]
27 > publish=False
27 > publish=False
28 > [hooks]
28 > [hooks]
29 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
29 > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
30 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
30 > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
31 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
31 > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
32 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
32 > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
33 > EOF
33 > EOF
34
34
35 The extension requires a repo (currently unused)
35 The extension requires a repo (currently unused)
36
36
37 $ hg init main
37 $ hg init main
38 $ cd main
38 $ cd main
39 $ touch a
39 $ touch a
40 $ hg add a
40 $ hg add a
41 $ hg commit -m 'a'
41 $ hg commit -m 'a'
42 pre-close-tip:3903775176ed draft
42 pre-close-tip:3903775176ed draft
43 postclose-tip:3903775176ed draft
43 postclose-tip:3903775176ed draft
44 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
44 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
45
45
46 $ hg unbundle $TESTDIR/bundles/rebase.hg
46 $ hg unbundle $TESTDIR/bundles/rebase.hg
47 adding changesets
47 adding changesets
48 adding manifests
48 adding manifests
49 adding file changes
49 adding file changes
50 added 8 changesets with 7 changes to 7 files (+3 heads)
50 added 8 changesets with 7 changes to 7 files (+3 heads)
51 pre-close-tip:02de42196ebe draft
51 pre-close-tip:02de42196ebe draft
52 postclose-tip:02de42196ebe draft
52 postclose-tip:02de42196ebe draft
53 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
53 txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
54 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
54 bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
55 (run 'hg heads' to see heads, 'hg merge' to merge)
55 (run 'hg heads' to see heads, 'hg merge' to merge)
56
56
57 $ cd ..
57 $ cd ..
58
58
59 Real world exchange
59 Real world exchange
60 =====================
60 =====================
61
61
62 Add more obsolescence information
62 Add more obsolescence information
63
63
64 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
64 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
65 pre-close-tip:02de42196ebe draft
65 pre-close-tip:02de42196ebe draft
66 postclose-tip:02de42196ebe draft
66 postclose-tip:02de42196ebe draft
67 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
67 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
68 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
68 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
69 pre-close-tip:02de42196ebe draft
69 pre-close-tip:02de42196ebe draft
70 postclose-tip:02de42196ebe draft
70 postclose-tip:02de42196ebe draft
71 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
71 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
72
72
73 clone --pull
73 clone --pull
74
74
75 $ hg -R main phase --public cd010b8cd998
75 $ hg -R main phase --public cd010b8cd998
76 pre-close-tip:02de42196ebe draft
76 pre-close-tip:02de42196ebe draft
77 postclose-tip:02de42196ebe draft
77 postclose-tip:02de42196ebe draft
78 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
78 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
79 $ hg clone main other --pull --rev 9520eea781bc
79 $ hg clone main other --pull --rev 9520eea781bc
80 adding changesets
80 adding changesets
81 adding manifests
81 adding manifests
82 adding file changes
82 adding file changes
83 added 2 changesets with 2 changes to 2 files
83 added 2 changesets with 2 changes to 2 files
84 1 new obsolescence markers
84 1 new obsolescence markers
85 pre-close-tip:9520eea781bc draft
85 pre-close-tip:9520eea781bc draft
86 postclose-tip:9520eea781bc draft
86 postclose-tip:9520eea781bc draft
87 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
87 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
88 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
88 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
89 updating to branch default
89 updating to branch default
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 $ hg -R other log -G
91 $ hg -R other log -G
92 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
92 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
93 |
93 |
94 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
94 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
95
95
96 $ hg -R other debugobsolete
96 $ hg -R other debugobsolete
97 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
97 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
98
98
99 pull
99 pull
100
100
101 $ hg -R main phase --public 9520eea781bc
101 $ hg -R main phase --public 9520eea781bc
102 pre-close-tip:02de42196ebe draft
102 pre-close-tip:02de42196ebe draft
103 postclose-tip:02de42196ebe draft
103 postclose-tip:02de42196ebe draft
104 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
104 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
105 $ hg -R other pull -r 24b6387c8c8c
105 $ hg -R other pull -r 24b6387c8c8c
106 pulling from $TESTTMP/main (glob)
106 pulling from $TESTTMP/main (glob)
107 searching for changes
107 searching for changes
108 adding changesets
108 adding changesets
109 adding manifests
109 adding manifests
110 adding file changes
110 adding file changes
111 added 1 changesets with 1 changes to 1 files (+1 heads)
111 added 1 changesets with 1 changes to 1 files (+1 heads)
112 1 new obsolescence markers
112 1 new obsolescence markers
113 pre-close-tip:24b6387c8c8c draft
113 pre-close-tip:24b6387c8c8c draft
114 postclose-tip:24b6387c8c8c draft
114 postclose-tip:24b6387c8c8c draft
115 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
115 txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
116 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
116 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
117 (run 'hg heads' to see heads, 'hg merge' to merge)
117 (run 'hg heads' to see heads, 'hg merge' to merge)
118 $ hg -R other log -G
118 $ hg -R other log -G
119 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
119 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
120 |
120 |
121 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
121 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
122 |/
122 |/
123 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
123 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
124
124
125 $ hg -R other debugobsolete
125 $ hg -R other debugobsolete
126 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
126 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
127 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
128
128
129 pull empty (with phase movement)
129 pull empty (with phase movement)
130
130
131 $ hg -R main phase --public 24b6387c8c8c
131 $ hg -R main phase --public 24b6387c8c8c
132 pre-close-tip:02de42196ebe draft
132 pre-close-tip:02de42196ebe draft
133 postclose-tip:02de42196ebe draft
133 postclose-tip:02de42196ebe draft
134 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
134 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
135 $ hg -R other pull -r 24b6387c8c8c
135 $ hg -R other pull -r 24b6387c8c8c
136 pulling from $TESTTMP/main (glob)
136 pulling from $TESTTMP/main (glob)
137 no changes found
137 no changes found
138 pre-close-tip:24b6387c8c8c public
138 pre-close-tip:24b6387c8c8c public
139 postclose-tip:24b6387c8c8c public
139 postclose-tip:24b6387c8c8c public
140 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
140 txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
141 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
141 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
142 $ hg -R other log -G
142 $ hg -R other log -G
143 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
143 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
144 |
144 |
145 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
145 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
146 |/
146 |/
147 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
147 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
148
148
149 $ hg -R other debugobsolete
149 $ hg -R other debugobsolete
150 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
150 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
151 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
152
152
153 pull empty
153 pull empty
154
154
155 $ hg -R other pull -r 24b6387c8c8c
155 $ hg -R other pull -r 24b6387c8c8c
156 pulling from $TESTTMP/main (glob)
156 pulling from $TESTTMP/main (glob)
157 no changes found
157 no changes found
158 pre-close-tip:24b6387c8c8c public
158 pre-close-tip:24b6387c8c8c public
159 postclose-tip:24b6387c8c8c public
159 postclose-tip:24b6387c8c8c public
160 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
160 txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
161 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
161 file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
162 $ hg -R other log -G
162 $ hg -R other log -G
163 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
163 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
164 |
164 |
165 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
165 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
166 |/
166 |/
167 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
167 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
168
168
169 $ hg -R other debugobsolete
169 $ hg -R other debugobsolete
170 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
170 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
171 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
172
172
173 add extra data to test their exchange during push
173 add extra data to test their exchange during push
174
174
175 $ hg -R main bookmark --rev eea13746799a book_eea1
175 $ hg -R main bookmark --rev eea13746799a book_eea1
176 pre-close-tip:02de42196ebe draft
176 pre-close-tip:02de42196ebe draft
177 postclose-tip:02de42196ebe draft
177 postclose-tip:02de42196ebe draft
178 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
178 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
179 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
179 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
180 pre-close-tip:02de42196ebe draft
180 pre-close-tip:02de42196ebe draft
181 postclose-tip:02de42196ebe draft
181 postclose-tip:02de42196ebe draft
182 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
182 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
183 $ hg -R main bookmark --rev 02de42196ebe book_02de
183 $ hg -R main bookmark --rev 02de42196ebe book_02de
184 pre-close-tip:02de42196ebe draft book_02de
184 pre-close-tip:02de42196ebe draft book_02de
185 postclose-tip:02de42196ebe draft book_02de
185 postclose-tip:02de42196ebe draft book_02de
186 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
186 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
187 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
187 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
188 pre-close-tip:02de42196ebe draft book_02de
188 pre-close-tip:02de42196ebe draft book_02de
189 postclose-tip:02de42196ebe draft book_02de
189 postclose-tip:02de42196ebe draft book_02de
190 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
190 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
191 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
191 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
192 pre-close-tip:02de42196ebe draft book_02de
192 pre-close-tip:02de42196ebe draft book_02de
193 postclose-tip:02de42196ebe draft book_02de
193 postclose-tip:02de42196ebe draft book_02de
194 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
194 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
195 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
195 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
196 pre-close-tip:02de42196ebe draft book_02de
196 pre-close-tip:02de42196ebe draft book_02de
197 postclose-tip:02de42196ebe draft book_02de
197 postclose-tip:02de42196ebe draft book_02de
198 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
198 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
199 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
199 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
200 pre-close-tip:02de42196ebe draft book_02de
200 pre-close-tip:02de42196ebe draft book_02de
201 postclose-tip:02de42196ebe draft book_02de
201 postclose-tip:02de42196ebe draft book_02de
202 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
202 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
203 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
203 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
204 pre-close-tip:02de42196ebe draft book_02de
204 pre-close-tip:02de42196ebe draft book_02de
205 postclose-tip:02de42196ebe draft book_02de
205 postclose-tip:02de42196ebe draft book_02de
206 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
206 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
207 $ hg -R main bookmark --rev 32af7686d403 book_32af
207 $ hg -R main bookmark --rev 32af7686d403 book_32af
208 pre-close-tip:02de42196ebe draft book_02de
208 pre-close-tip:02de42196ebe draft book_02de
209 postclose-tip:02de42196ebe draft book_02de
209 postclose-tip:02de42196ebe draft book_02de
210 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
210 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
211 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
211 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
212 pre-close-tip:02de42196ebe draft book_02de
212 pre-close-tip:02de42196ebe draft book_02de
213 postclose-tip:02de42196ebe draft book_02de
213 postclose-tip:02de42196ebe draft book_02de
214 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
214 txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
215
215
216 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
216 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
217 pre-close-tip:24b6387c8c8c public
217 pre-close-tip:24b6387c8c8c public
218 postclose-tip:24b6387c8c8c public
218 postclose-tip:24b6387c8c8c public
219 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
219 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
220 $ hg -R other bookmark --rev cd010b8cd998 book_02de
220 $ hg -R other bookmark --rev cd010b8cd998 book_02de
221 pre-close-tip:24b6387c8c8c public
221 pre-close-tip:24b6387c8c8c public
222 postclose-tip:24b6387c8c8c public
222 postclose-tip:24b6387c8c8c public
223 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
223 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
224 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
224 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
225 pre-close-tip:24b6387c8c8c public
225 pre-close-tip:24b6387c8c8c public
226 postclose-tip:24b6387c8c8c public
226 postclose-tip:24b6387c8c8c public
227 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
227 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
228 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
228 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
229 pre-close-tip:24b6387c8c8c public
229 pre-close-tip:24b6387c8c8c public
230 postclose-tip:24b6387c8c8c public
230 postclose-tip:24b6387c8c8c public
231 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
231 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
232 $ hg -R other bookmark --rev cd010b8cd998 book_32af
232 $ hg -R other bookmark --rev cd010b8cd998 book_32af
233 pre-close-tip:24b6387c8c8c public
233 pre-close-tip:24b6387c8c8c public
234 postclose-tip:24b6387c8c8c public
234 postclose-tip:24b6387c8c8c public
235 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
235 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
236
236
237 $ hg -R main phase --public eea13746799a
237 $ hg -R main phase --public eea13746799a
238 pre-close-tip:02de42196ebe draft book_02de
238 pre-close-tip:02de42196ebe draft book_02de
239 postclose-tip:02de42196ebe draft book_02de
239 postclose-tip:02de42196ebe draft book_02de
240 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
240 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
241
241
242 push
242 push
243 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
243 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
244 pushing to other
244 pushing to other
245 searching for changes
245 searching for changes
246 remote: adding changesets
246 remote: adding changesets
247 remote: adding manifests
247 remote: adding manifests
248 remote: adding file changes
248 remote: adding file changes
249 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
249 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
250 remote: 1 new obsolescence markers
250 remote: 1 new obsolescence markers
251 remote: pre-close-tip:eea13746799a public book_eea1
251 remote: pre-close-tip:eea13746799a public book_eea1
252 remote: pushkey: lock state after "phases"
252 remote: pushkey: lock state after "phases"
253 remote: lock: free
253 remote: lock: free
254 remote: wlock: free
254 remote: wlock: free
255 remote: pushkey: lock state after "bookmarks"
255 remote: pushkey: lock state after "bookmarks"
256 remote: lock: free
256 remote: lock: free
257 remote: wlock: free
257 remote: wlock: free
258 remote: postclose-tip:eea13746799a public book_eea1
258 remote: postclose-tip:eea13746799a public book_eea1
259 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/other (glob)
259 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=file:$TESTTMP/other (glob)
260 updating bookmark book_eea1
260 updating bookmark book_eea1
261 pre-close-tip:02de42196ebe draft book_02de
261 pre-close-tip:02de42196ebe draft book_02de
262 postclose-tip:02de42196ebe draft book_02de
262 postclose-tip:02de42196ebe draft book_02de
263 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
263 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
264 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
264 file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
265 $ hg -R other log -G
265 $ hg -R other log -G
266 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
266 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
267 |\
267 |\
268 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
268 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
269 | |
269 | |
270 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
270 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
271 |/
271 |/
272 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
272 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
273
273
274 $ hg -R other debugobsolete
274 $ hg -R other debugobsolete
275 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
275 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
276 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
276 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
277 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
277 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
278
278
279 pull over ssh
279 pull over ssh
280
280
281 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
281 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
282 pulling from ssh://user@dummy/main
282 pulling from ssh://user@dummy/main
283 searching for changes
283 searching for changes
284 adding changesets
284 adding changesets
285 adding manifests
285 adding manifests
286 adding file changes
286 adding file changes
287 added 1 changesets with 1 changes to 1 files (+1 heads)
287 added 1 changesets with 1 changes to 1 files (+1 heads)
288 1 new obsolescence markers
288 1 new obsolescence markers
289 updating bookmark book_02de
289 updating bookmark book_02de
290 pre-close-tip:02de42196ebe draft book_02de
290 pre-close-tip:02de42196ebe draft book_02de
291 postclose-tip:02de42196ebe draft book_02de
291 postclose-tip:02de42196ebe draft book_02de
292 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
292 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
293 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
293 ssh://user@dummy/main HG_URL=ssh://user@dummy/main
294 (run 'hg heads' to see heads, 'hg merge' to merge)
294 (run 'hg heads' to see heads, 'hg merge' to merge)
295 $ hg -R other debugobsolete
295 $ hg -R other debugobsolete
296 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
296 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
297 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
298 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
299 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
300
300
301 pull over http
301 pull over http
302
302
303 $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
303 $ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
304 $ cat main.pid >> $DAEMON_PIDS
304 $ cat main.pid >> $DAEMON_PIDS
305
305
306 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
306 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
307 pulling from http://localhost:$HGPORT/
307 pulling from http://localhost:$HGPORT/
308 searching for changes
308 searching for changes
309 adding changesets
309 adding changesets
310 adding manifests
310 adding manifests
311 adding file changes
311 adding file changes
312 added 1 changesets with 1 changes to 1 files (+1 heads)
312 added 1 changesets with 1 changes to 1 files (+1 heads)
313 1 new obsolescence markers
313 1 new obsolescence markers
314 updating bookmark book_42cc
314 updating bookmark book_42cc
315 pre-close-tip:42ccdea3bb16 draft book_42cc
315 pre-close-tip:42ccdea3bb16 draft book_42cc
316 postclose-tip:42ccdea3bb16 draft book_42cc
316 postclose-tip:42ccdea3bb16 draft book_42cc
317 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
317 txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
318 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
318 http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
319 (run 'hg heads .' to see heads, 'hg merge' to merge)
319 (run 'hg heads .' to see heads, 'hg merge' to merge)
320 $ cat main-error.log
320 $ cat main-error.log
321 $ hg -R other debugobsolete
321 $ hg -R other debugobsolete
322 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
322 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
323 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
323 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
324 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
324 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
325 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
325 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
326 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
326 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
327
327
328 push over ssh
328 push over ssh
329
329
330 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
330 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
331 pushing to ssh://user@dummy/other
331 pushing to ssh://user@dummy/other
332 searching for changes
332 searching for changes
333 remote: adding changesets
333 remote: adding changesets
334 remote: adding manifests
334 remote: adding manifests
335 remote: adding file changes
335 remote: adding file changes
336 remote: added 1 changesets with 1 changes to 1 files
336 remote: added 1 changesets with 1 changes to 1 files
337 remote: 1 new obsolescence markers
337 remote: 1 new obsolescence markers
338 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
338 remote: pre-close-tip:5fddd98957c8 draft book_5fdd
339 remote: pushkey: lock state after "bookmarks"
339 remote: pushkey: lock state after "bookmarks"
340 remote: lock: free
340 remote: lock: free
341 remote: wlock: free
341 remote: wlock: free
342 remote: postclose-tip:5fddd98957c8 draft book_5fdd
342 remote: postclose-tip:5fddd98957c8 draft book_5fdd
343 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
343 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
344 updating bookmark book_5fdd
344 updating bookmark book_5fdd
345 pre-close-tip:02de42196ebe draft book_02de
345 pre-close-tip:02de42196ebe draft book_02de
346 postclose-tip:02de42196ebe draft book_02de
346 postclose-tip:02de42196ebe draft book_02de
347 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
347 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
348 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
348 ssh://user@dummy/other HG_URL=ssh://user@dummy/other
349 $ hg -R other log -G
349 $ hg -R other log -G
350 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
350 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
351 |
351 |
352 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
352 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
353 |
353 |
354 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
354 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
355 | |
355 | |
356 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
356 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
357 | |/|
357 | |/|
358 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
358 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
359 |/ /
359 |/ /
360 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
360 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
361 |/
361 |/
362 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
362 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
363
363
364 $ hg -R other debugobsolete
364 $ hg -R other debugobsolete
365 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
365 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
366 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
366 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
367 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
367 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
368 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
368 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
369 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
369 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
370 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
370 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
371
371
372 push over http
372 push over http
373
373
374 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
374 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
375 $ cat other.pid >> $DAEMON_PIDS
375 $ cat other.pid >> $DAEMON_PIDS
376
376
377 $ hg -R main phase --public 32af7686d403
377 $ hg -R main phase --public 32af7686d403
378 pre-close-tip:02de42196ebe draft book_02de
378 pre-close-tip:02de42196ebe draft book_02de
379 postclose-tip:02de42196ebe draft book_02de
379 postclose-tip:02de42196ebe draft book_02de
380 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
380 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
381 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
381 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
382 pushing to http://localhost:$HGPORT2/
382 pushing to http://localhost:$HGPORT2/
383 searching for changes
383 searching for changes
384 remote: adding changesets
384 remote: adding changesets
385 remote: adding manifests
385 remote: adding manifests
386 remote: adding file changes
386 remote: adding file changes
387 remote: added 1 changesets with 1 changes to 1 files
387 remote: added 1 changesets with 1 changes to 1 files
388 remote: 1 new obsolescence markers
388 remote: 1 new obsolescence markers
389 remote: pre-close-tip:32af7686d403 public book_32af
389 remote: pre-close-tip:32af7686d403 public book_32af
390 remote: pushkey: lock state after "phases"
390 remote: pushkey: lock state after "phases"
391 remote: lock: free
391 remote: lock: free
392 remote: wlock: free
392 remote: wlock: free
393 remote: pushkey: lock state after "bookmarks"
393 remote: pushkey: lock state after "bookmarks"
394 remote: lock: free
394 remote: lock: free
395 remote: wlock: free
395 remote: wlock: free
396 remote: postclose-tip:32af7686d403 public book_32af
396 remote: postclose-tip:32af7686d403 public book_32af
397 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
397 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob)
398 updating bookmark book_32af
398 updating bookmark book_32af
399 pre-close-tip:02de42196ebe draft book_02de
399 pre-close-tip:02de42196ebe draft book_02de
400 postclose-tip:02de42196ebe draft book_02de
400 postclose-tip:02de42196ebe draft book_02de
401 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
401 txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
402 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
402 http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
403 $ cat other-error.log
403 $ cat other-error.log
404
404
405 Check final content.
405 Check final content.
406
406
407 $ hg -R other log -G
407 $ hg -R other log -G
408 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
408 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
409 |
409 |
410 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
410 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
411 |
411 |
412 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
412 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
413 |
413 |
414 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
414 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
415 | |
415 | |
416 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
416 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
417 | |/|
417 | |/|
418 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
418 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
419 |/ /
419 |/ /
420 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
420 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
421 |/
421 |/
422 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
422 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
423
423
424 $ hg -R other debugobsolete
424 $ hg -R other debugobsolete
425 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
425 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
426 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
426 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
427 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
427 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
428 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
428 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
429 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
429 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
430 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
430 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
431 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
432
432
433 (check that no 'pending' files remain)
433 (check that no 'pending' files remain)
434
434
435 $ ls -1 other/.hg/bookmarks*
435 $ ls -1 other/.hg/bookmarks*
436 other/.hg/bookmarks
436 other/.hg/bookmarks
437 $ ls -1 other/.hg/store/phaseroots*
437 $ ls -1 other/.hg/store/phaseroots*
438 other/.hg/store/phaseroots
438 other/.hg/store/phaseroots
439 $ ls -1 other/.hg/store/00changelog.i*
439 $ ls -1 other/.hg/store/00changelog.i*
440 other/.hg/store/00changelog.i
440 other/.hg/store/00changelog.i
441
441
442 Error Handling
442 Error Handling
443 ==============
443 ==============
444
444
445 Check that errors are properly returned to the client during push.
445 Check that errors are properly returned to the client during push.
446
446
447 Setting up
447 Setting up
448
448
449 $ cat > failpush.py << EOF
449 $ cat > failpush.py << EOF
450 > """A small extension that makes push fails when using bundle2
450 > """A small extension that makes push fails when using bundle2
451 >
451 >
452 > used to test error handling in bundle2
452 > used to test error handling in bundle2
453 > """
453 > """
454 >
454 >
455 > from mercurial import error
455 > from mercurial import error
456 > from mercurial import bundle2
456 > from mercurial import bundle2
457 > from mercurial import exchange
457 > from mercurial import exchange
458 > from mercurial import extensions
458 > from mercurial import extensions
459 >
459 >
460 > def _pushbundle2failpart(pushop, bundler):
460 > def _pushbundle2failpart(pushop, bundler):
461 > reason = pushop.ui.config('failpush', 'reason', None)
461 > reason = pushop.ui.config('failpush', 'reason', None)
462 > part = None
462 > part = None
463 > if reason == 'abort':
463 > if reason == 'abort':
464 > bundler.newpart('test:abort')
464 > bundler.newpart('test:abort')
465 > if reason == 'unknown':
465 > if reason == 'unknown':
466 > bundler.newpart('test:unknown')
466 > bundler.newpart('test:unknown')
467 > if reason == 'race':
467 > if reason == 'race':
468 > # 20 Bytes of crap
468 > # 20 Bytes of crap
469 > bundler.newpart('check:heads', data='01234567890123456789')
469 > bundler.newpart('check:heads', data='01234567890123456789')
470 >
470 >
471 > @bundle2.parthandler("test:abort")
471 > @bundle2.parthandler("test:abort")
472 > def handleabort(op, part):
472 > def handleabort(op, part):
473 > raise error.Abort('Abandon ship!', hint="don't panic")
473 > raise error.Abort('Abandon ship!', hint="don't panic")
474 >
474 >
475 > def uisetup(ui):
475 > def uisetup(ui):
476 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
476 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
477 > exchange.b2partsgenorder.insert(0, 'failpart')
477 > exchange.b2partsgenorder.insert(0, 'failpart')
478 >
478 >
479 > EOF
479 > EOF
480
480
481 $ cd main
481 $ cd main
482 $ hg up tip
482 $ hg up tip
483 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
483 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
484 $ echo 'I' > I
484 $ echo 'I' > I
485 $ hg add I
485 $ hg add I
486 $ hg ci -m 'I'
486 $ hg ci -m 'I'
487 pre-close-tip:e7ec4e813ba6 draft
487 pre-close-tip:e7ec4e813ba6 draft
488 postclose-tip:e7ec4e813ba6 draft
488 postclose-tip:e7ec4e813ba6 draft
489 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
489 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
490 $ hg id
490 $ hg id
491 e7ec4e813ba6 tip
491 e7ec4e813ba6 tip
492 $ cd ..
492 $ cd ..
493
493
494 $ cat << EOF >> $HGRCPATH
494 $ cat << EOF >> $HGRCPATH
495 > [extensions]
495 > [extensions]
496 > failpush=$TESTTMP/failpush.py
496 > failpush=$TESTTMP/failpush.py
497 > EOF
497 > EOF
498
498
499 $ killdaemons.py
499 $ killdaemons.py
500 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
500 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
501 $ cat other.pid >> $DAEMON_PIDS
501 $ cat other.pid >> $DAEMON_PIDS
502
502
503 Doing the actual push: Abort error
503 Doing the actual push: Abort error
504
504
505 $ cat << EOF >> $HGRCPATH
505 $ cat << EOF >> $HGRCPATH
506 > [failpush]
506 > [failpush]
507 > reason = abort
507 > reason = abort
508 > EOF
508 > EOF
509
509
510 $ hg -R main push other -r e7ec4e813ba6
510 $ hg -R main push other -r e7ec4e813ba6
511 pushing to other
511 pushing to other
512 searching for changes
512 searching for changes
513 abort: Abandon ship!
513 abort: Abandon ship!
514 (don't panic)
514 (don't panic)
515 [255]
515 [255]
516
516
517 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
517 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
518 pushing to ssh://user@dummy/other
518 pushing to ssh://user@dummy/other
519 searching for changes
519 searching for changes
520 remote: Abandon ship!
520 remote: Abandon ship!
521 remote: (don't panic)
521 abort: push failed on remote
522 abort: push failed on remote
522 (don't panic)
523 [255]
523 [255]
524
524
525 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
525 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
526 pushing to http://localhost:$HGPORT2/
526 pushing to http://localhost:$HGPORT2/
527 searching for changes
527 searching for changes
528 remote: Abandon ship!
528 remote: Abandon ship!
529 remote: (don't panic)
529 abort: push failed on remote
530 abort: push failed on remote
530 (don't panic)
531 [255]
531 [255]
532
532
533
533
534 Doing the actual push: unknown mandatory parts
534 Doing the actual push: unknown mandatory parts
535
535
536 $ cat << EOF >> $HGRCPATH
536 $ cat << EOF >> $HGRCPATH
537 > [failpush]
537 > [failpush]
538 > reason = unknown
538 > reason = unknown
539 > EOF
539 > EOF
540
540
541 $ hg -R main push other -r e7ec4e813ba6
541 $ hg -R main push other -r e7ec4e813ba6
542 pushing to other
542 pushing to other
543 searching for changes
543 searching for changes
544 abort: missing support for test:unknown
544 abort: missing support for test:unknown
545 [255]
545 [255]
546
546
547 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
547 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
548 pushing to ssh://user@dummy/other
548 pushing to ssh://user@dummy/other
549 searching for changes
549 searching for changes
550 abort: missing support for test:unknown
550 abort: missing support for test:unknown
551 [255]
551 [255]
552
552
553 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
553 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
554 pushing to http://localhost:$HGPORT2/
554 pushing to http://localhost:$HGPORT2/
555 searching for changes
555 searching for changes
556 abort: missing support for test:unknown
556 abort: missing support for test:unknown
557 [255]
557 [255]
558
558
559 Doing the actual push: race
559 Doing the actual push: race
560
560
561 $ cat << EOF >> $HGRCPATH
561 $ cat << EOF >> $HGRCPATH
562 > [failpush]
562 > [failpush]
563 > reason = race
563 > reason = race
564 > EOF
564 > EOF
565
565
566 $ hg -R main push other -r e7ec4e813ba6
566 $ hg -R main push other -r e7ec4e813ba6
567 pushing to other
567 pushing to other
568 searching for changes
568 searching for changes
569 abort: push failed:
569 abort: push failed:
570 'repository changed while pushing - please try again'
570 'repository changed while pushing - please try again'
571 [255]
571 [255]
572
572
573 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
573 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
574 pushing to ssh://user@dummy/other
574 pushing to ssh://user@dummy/other
575 searching for changes
575 searching for changes
576 abort: push failed:
576 abort: push failed:
577 'repository changed while pushing - please try again'
577 'repository changed while pushing - please try again'
578 [255]
578 [255]
579
579
580 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
580 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
581 pushing to http://localhost:$HGPORT2/
581 pushing to http://localhost:$HGPORT2/
582 searching for changes
582 searching for changes
583 abort: push failed:
583 abort: push failed:
584 'repository changed while pushing - please try again'
584 'repository changed while pushing - please try again'
585 [255]
585 [255]
586
586
587 Doing the actual push: hook abort
587 Doing the actual push: hook abort
588
588
589 $ cat << EOF >> $HGRCPATH
589 $ cat << EOF >> $HGRCPATH
590 > [failpush]
590 > [failpush]
591 > reason =
591 > reason =
592 > [hooks]
592 > [hooks]
593 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
593 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
594 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
594 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
595 > EOF
595 > EOF
596
596
597 $ killdaemons.py
597 $ killdaemons.py
598 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
598 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
599 $ cat other.pid >> $DAEMON_PIDS
599 $ cat other.pid >> $DAEMON_PIDS
600
600
601 $ hg -R main push other -r e7ec4e813ba6
601 $ hg -R main push other -r e7ec4e813ba6
602 pushing to other
602 pushing to other
603 searching for changes
603 searching for changes
604 remote: adding changesets
604 remote: adding changesets
605 remote: adding manifests
605 remote: adding manifests
606 remote: adding file changes
606 remote: adding file changes
607 remote: added 1 changesets with 1 changes to 1 files
607 remote: added 1 changesets with 1 changes to 1 files
608 remote: pre-close-tip:e7ec4e813ba6 draft
608 remote: pre-close-tip:e7ec4e813ba6 draft
609 remote: You shall not pass!
609 remote: You shall not pass!
610 remote: transaction abort!
610 remote: transaction abort!
611 remote: Cleaning up the mess...
611 remote: Cleaning up the mess...
612 remote: rollback completed
612 remote: rollback completed
613 abort: pretxnclose.failpush hook exited with status 1
613 abort: pretxnclose.failpush hook exited with status 1
614 [255]
614 [255]
615
615
616 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
616 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
617 pushing to ssh://user@dummy/other
617 pushing to ssh://user@dummy/other
618 searching for changes
618 searching for changes
619 remote: adding changesets
619 remote: adding changesets
620 remote: adding manifests
620 remote: adding manifests
621 remote: adding file changes
621 remote: adding file changes
622 remote: added 1 changesets with 1 changes to 1 files
622 remote: added 1 changesets with 1 changes to 1 files
623 remote: pre-close-tip:e7ec4e813ba6 draft
623 remote: pre-close-tip:e7ec4e813ba6 draft
624 remote: You shall not pass!
624 remote: You shall not pass!
625 remote: transaction abort!
625 remote: transaction abort!
626 remote: Cleaning up the mess...
626 remote: Cleaning up the mess...
627 remote: rollback completed
627 remote: rollback completed
628 remote: pretxnclose.failpush hook exited with status 1
628 remote: pretxnclose.failpush hook exited with status 1
629 abort: push failed on remote
629 abort: push failed on remote
630 [255]
630 [255]
631
631
632 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
632 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
633 pushing to http://localhost:$HGPORT2/
633 pushing to http://localhost:$HGPORT2/
634 searching for changes
634 searching for changes
635 remote: adding changesets
635 remote: adding changesets
636 remote: adding manifests
636 remote: adding manifests
637 remote: adding file changes
637 remote: adding file changes
638 remote: added 1 changesets with 1 changes to 1 files
638 remote: added 1 changesets with 1 changes to 1 files
639 remote: pre-close-tip:e7ec4e813ba6 draft
639 remote: pre-close-tip:e7ec4e813ba6 draft
640 remote: You shall not pass!
640 remote: You shall not pass!
641 remote: transaction abort!
641 remote: transaction abort!
642 remote: Cleaning up the mess...
642 remote: Cleaning up the mess...
643 remote: rollback completed
643 remote: rollback completed
644 remote: pretxnclose.failpush hook exited with status 1
644 remote: pretxnclose.failpush hook exited with status 1
645 abort: push failed on remote
645 abort: push failed on remote
646 [255]
646 [255]
647
647
648 (check that no 'pending' files remain)
648 (check that no 'pending' files remain)
649
649
650 $ ls -1 other/.hg/bookmarks*
650 $ ls -1 other/.hg/bookmarks*
651 other/.hg/bookmarks
651 other/.hg/bookmarks
652 $ ls -1 other/.hg/store/phaseroots*
652 $ ls -1 other/.hg/store/phaseroots*
653 other/.hg/store/phaseroots
653 other/.hg/store/phaseroots
654 $ ls -1 other/.hg/store/00changelog.i*
654 $ ls -1 other/.hg/store/00changelog.i*
655 other/.hg/store/00changelog.i
655 other/.hg/store/00changelog.i
656
656
657 Check error from hook during the unbundling process itself
657 Check error from hook during the unbundling process itself
658
658
659 $ cat << EOF >> $HGRCPATH
659 $ cat << EOF >> $HGRCPATH
660 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
660 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
661 > EOF
661 > EOF
662 $ killdaemons.py # reload http config
662 $ killdaemons.py # reload http config
663 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
663 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
664 $ cat other.pid >> $DAEMON_PIDS
664 $ cat other.pid >> $DAEMON_PIDS
665
665
666 $ hg -R main push other -r e7ec4e813ba6
666 $ hg -R main push other -r e7ec4e813ba6
667 pushing to other
667 pushing to other
668 searching for changes
668 searching for changes
669 remote: adding changesets
669 remote: adding changesets
670 remote: adding manifests
670 remote: adding manifests
671 remote: adding file changes
671 remote: adding file changes
672 remote: added 1 changesets with 1 changes to 1 files
672 remote: added 1 changesets with 1 changes to 1 files
673 remote: Fail early!
673 remote: Fail early!
674 remote: transaction abort!
674 remote: transaction abort!
675 remote: Cleaning up the mess...
675 remote: Cleaning up the mess...
676 remote: rollback completed
676 remote: rollback completed
677 abort: pretxnchangegroup hook exited with status 1
677 abort: pretxnchangegroup hook exited with status 1
678 [255]
678 [255]
679 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
679 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
680 pushing to ssh://user@dummy/other
680 pushing to ssh://user@dummy/other
681 searching for changes
681 searching for changes
682 remote: adding changesets
682 remote: adding changesets
683 remote: adding manifests
683 remote: adding manifests
684 remote: adding file changes
684 remote: adding file changes
685 remote: added 1 changesets with 1 changes to 1 files
685 remote: added 1 changesets with 1 changes to 1 files
686 remote: Fail early!
686 remote: Fail early!
687 remote: transaction abort!
687 remote: transaction abort!
688 remote: Cleaning up the mess...
688 remote: Cleaning up the mess...
689 remote: rollback completed
689 remote: rollback completed
690 remote: pretxnchangegroup hook exited with status 1
690 remote: pretxnchangegroup hook exited with status 1
691 abort: push failed on remote
691 abort: push failed on remote
692 [255]
692 [255]
693 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
693 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
694 pushing to http://localhost:$HGPORT2/
694 pushing to http://localhost:$HGPORT2/
695 searching for changes
695 searching for changes
696 remote: adding changesets
696 remote: adding changesets
697 remote: adding manifests
697 remote: adding manifests
698 remote: adding file changes
698 remote: adding file changes
699 remote: added 1 changesets with 1 changes to 1 files
699 remote: added 1 changesets with 1 changes to 1 files
700 remote: Fail early!
700 remote: Fail early!
701 remote: transaction abort!
701 remote: transaction abort!
702 remote: Cleaning up the mess...
702 remote: Cleaning up the mess...
703 remote: rollback completed
703 remote: rollback completed
704 remote: pretxnchangegroup hook exited with status 1
704 remote: pretxnchangegroup hook exited with status 1
705 abort: push failed on remote
705 abort: push failed on remote
706 [255]
706 [255]
707
707
708 Check output capture control.
708 Check output capture control.
709
709
710 (should be still forced for http, disabled for local and ssh)
710 (should be still forced for http, disabled for local and ssh)
711
711
712 $ cat >> $HGRCPATH << EOF
712 $ cat >> $HGRCPATH << EOF
713 > [experimental]
713 > [experimental]
714 > bundle2-output-capture=False
714 > bundle2-output-capture=False
715 > EOF
715 > EOF
716
716
717 $ hg -R main push other -r e7ec4e813ba6
717 $ hg -R main push other -r e7ec4e813ba6
718 pushing to other
718 pushing to other
719 searching for changes
719 searching for changes
720 adding changesets
720 adding changesets
721 adding manifests
721 adding manifests
722 adding file changes
722 adding file changes
723 added 1 changesets with 1 changes to 1 files
723 added 1 changesets with 1 changes to 1 files
724 Fail early!
724 Fail early!
725 transaction abort!
725 transaction abort!
726 Cleaning up the mess...
726 Cleaning up the mess...
727 rollback completed
727 rollback completed
728 abort: pretxnchangegroup hook exited with status 1
728 abort: pretxnchangegroup hook exited with status 1
729 [255]
729 [255]
730 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
730 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
731 pushing to ssh://user@dummy/other
731 pushing to ssh://user@dummy/other
732 searching for changes
732 searching for changes
733 remote: adding changesets
733 remote: adding changesets
734 remote: adding manifests
734 remote: adding manifests
735 remote: adding file changes
735 remote: adding file changes
736 remote: added 1 changesets with 1 changes to 1 files
736 remote: added 1 changesets with 1 changes to 1 files
737 remote: Fail early!
737 remote: Fail early!
738 remote: transaction abort!
738 remote: transaction abort!
739 remote: Cleaning up the mess...
739 remote: Cleaning up the mess...
740 remote: rollback completed
740 remote: rollback completed
741 remote: pretxnchangegroup hook exited with status 1
741 remote: pretxnchangegroup hook exited with status 1
742 abort: push failed on remote
742 abort: push failed on remote
743 [255]
743 [255]
744 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
744 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
745 pushing to http://localhost:$HGPORT2/
745 pushing to http://localhost:$HGPORT2/
746 searching for changes
746 searching for changes
747 remote: adding changesets
747 remote: adding changesets
748 remote: adding manifests
748 remote: adding manifests
749 remote: adding file changes
749 remote: adding file changes
750 remote: added 1 changesets with 1 changes to 1 files
750 remote: added 1 changesets with 1 changes to 1 files
751 remote: Fail early!
751 remote: Fail early!
752 remote: transaction abort!
752 remote: transaction abort!
753 remote: Cleaning up the mess...
753 remote: Cleaning up the mess...
754 remote: rollback completed
754 remote: rollback completed
755 remote: pretxnchangegroup hook exited with status 1
755 remote: pretxnchangegroup hook exited with status 1
756 abort: push failed on remote
756 abort: push failed on remote
757 [255]
757 [255]
758
758
759 Check abort from mandatory pushkey
759 Check abort from mandatory pushkey
760
760
761 $ cat > mandatorypart.py << EOF
761 $ cat > mandatorypart.py << EOF
762 > from mercurial import exchange
762 > from mercurial import exchange
763 > from mercurial import pushkey
763 > from mercurial import pushkey
764 > from mercurial import node
764 > from mercurial import node
765 > from mercurial import error
765 > from mercurial import error
766 > @exchange.b2partsgenerator('failingpuskey')
766 > @exchange.b2partsgenerator('failingpuskey')
767 > def addfailingpushey(pushop, bundler):
767 > def addfailingpushey(pushop, bundler):
768 > enc = pushkey.encode
768 > enc = pushkey.encode
769 > part = bundler.newpart('pushkey')
769 > part = bundler.newpart('pushkey')
770 > part.addparam('namespace', enc('phases'))
770 > part.addparam('namespace', enc('phases'))
771 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
771 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
772 > part.addparam('old', enc(str(0))) # successful update
772 > part.addparam('old', enc(str(0))) # successful update
773 > part.addparam('new', enc(str(0)))
773 > part.addparam('new', enc(str(0)))
774 > def fail(pushop, exc):
774 > def fail(pushop, exc):
775 > raise error.Abort('Correct phase push failed (because hooks)')
775 > raise error.Abort('Correct phase push failed (because hooks)')
776 > pushop.pkfailcb[part.id] = fail
776 > pushop.pkfailcb[part.id] = fail
777 > EOF
777 > EOF
778 $ cat >> $HGRCPATH << EOF
778 $ cat >> $HGRCPATH << EOF
779 > [hooks]
779 > [hooks]
780 > pretxnchangegroup=
780 > pretxnchangegroup=
781 > pretxnclose.failpush=
781 > pretxnclose.failpush=
782 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
782 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
783 > [extensions]
783 > [extensions]
784 > mandatorypart=$TESTTMP/mandatorypart.py
784 > mandatorypart=$TESTTMP/mandatorypart.py
785 > EOF
785 > EOF
786 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
786 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
787 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
787 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
788 $ cat other.pid >> $DAEMON_PIDS
788 $ cat other.pid >> $DAEMON_PIDS
789
789
790 (Failure from a hook)
790 (Failure from a hook)
791
791
792 $ hg -R main push other -r e7ec4e813ba6
792 $ hg -R main push other -r e7ec4e813ba6
793 pushing to other
793 pushing to other
794 searching for changes
794 searching for changes
795 adding changesets
795 adding changesets
796 adding manifests
796 adding manifests
797 adding file changes
797 adding file changes
798 added 1 changesets with 1 changes to 1 files
798 added 1 changesets with 1 changes to 1 files
799 do not push the key !
799 do not push the key !
800 pushkey-abort: prepushkey.failpush hook exited with status 1
800 pushkey-abort: prepushkey.failpush hook exited with status 1
801 transaction abort!
801 transaction abort!
802 Cleaning up the mess...
802 Cleaning up the mess...
803 rollback completed
803 rollback completed
804 abort: Correct phase push failed (because hooks)
804 abort: Correct phase push failed (because hooks)
805 [255]
805 [255]
806 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
806 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
807 pushing to ssh://user@dummy/other
807 pushing to ssh://user@dummy/other
808 searching for changes
808 searching for changes
809 remote: adding changesets
809 remote: adding changesets
810 remote: adding manifests
810 remote: adding manifests
811 remote: adding file changes
811 remote: adding file changes
812 remote: added 1 changesets with 1 changes to 1 files
812 remote: added 1 changesets with 1 changes to 1 files
813 remote: do not push the key !
813 remote: do not push the key !
814 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
814 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
815 remote: transaction abort!
815 remote: transaction abort!
816 remote: Cleaning up the mess...
816 remote: Cleaning up the mess...
817 remote: rollback completed
817 remote: rollback completed
818 abort: Correct phase push failed (because hooks)
818 abort: Correct phase push failed (because hooks)
819 [255]
819 [255]
820 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
820 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
821 pushing to http://localhost:$HGPORT2/
821 pushing to http://localhost:$HGPORT2/
822 searching for changes
822 searching for changes
823 remote: adding changesets
823 remote: adding changesets
824 remote: adding manifests
824 remote: adding manifests
825 remote: adding file changes
825 remote: adding file changes
826 remote: added 1 changesets with 1 changes to 1 files
826 remote: added 1 changesets with 1 changes to 1 files
827 remote: do not push the key !
827 remote: do not push the key !
828 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
828 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
829 remote: transaction abort!
829 remote: transaction abort!
830 remote: Cleaning up the mess...
830 remote: Cleaning up the mess...
831 remote: rollback completed
831 remote: rollback completed
832 abort: Correct phase push failed (because hooks)
832 abort: Correct phase push failed (because hooks)
833 [255]
833 [255]
834
834
835 (Failure from a the pushkey)
835 (Failure from a the pushkey)
836
836
837 $ cat > mandatorypart.py << EOF
837 $ cat > mandatorypart.py << EOF
838 > from mercurial import exchange
838 > from mercurial import exchange
839 > from mercurial import pushkey
839 > from mercurial import pushkey
840 > from mercurial import node
840 > from mercurial import node
841 > from mercurial import error
841 > from mercurial import error
842 > @exchange.b2partsgenerator('failingpuskey')
842 > @exchange.b2partsgenerator('failingpuskey')
843 > def addfailingpushey(pushop, bundler):
843 > def addfailingpushey(pushop, bundler):
844 > enc = pushkey.encode
844 > enc = pushkey.encode
845 > part = bundler.newpart('pushkey')
845 > part = bundler.newpart('pushkey')
846 > part.addparam('namespace', enc('phases'))
846 > part.addparam('namespace', enc('phases'))
847 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
847 > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex()))
848 > part.addparam('old', enc(str(4))) # will fail
848 > part.addparam('old', enc(str(4))) # will fail
849 > part.addparam('new', enc(str(3)))
849 > part.addparam('new', enc(str(3)))
850 > def fail(pushop, exc):
850 > def fail(pushop, exc):
851 > raise error.Abort('Clown phase push failed')
851 > raise error.Abort('Clown phase push failed')
852 > pushop.pkfailcb[part.id] = fail
852 > pushop.pkfailcb[part.id] = fail
853 > EOF
853 > EOF
854 $ cat >> $HGRCPATH << EOF
854 $ cat >> $HGRCPATH << EOF
855 > [hooks]
855 > [hooks]
856 > prepushkey.failpush =
856 > prepushkey.failpush =
857 > EOF
857 > EOF
858 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
858 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
859 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
859 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
860 $ cat other.pid >> $DAEMON_PIDS
860 $ cat other.pid >> $DAEMON_PIDS
861
861
862 $ hg -R main push other -r e7ec4e813ba6
862 $ hg -R main push other -r e7ec4e813ba6
863 pushing to other
863 pushing to other
864 searching for changes
864 searching for changes
865 adding changesets
865 adding changesets
866 adding manifests
866 adding manifests
867 adding file changes
867 adding file changes
868 added 1 changesets with 1 changes to 1 files
868 added 1 changesets with 1 changes to 1 files
869 transaction abort!
869 transaction abort!
870 Cleaning up the mess...
870 Cleaning up the mess...
871 rollback completed
871 rollback completed
872 pushkey: lock state after "phases"
872 pushkey: lock state after "phases"
873 lock: free
873 lock: free
874 wlock: free
874 wlock: free
875 abort: Clown phase push failed
875 abort: Clown phase push failed
876 [255]
876 [255]
877 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
877 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
878 pushing to ssh://user@dummy/other
878 pushing to ssh://user@dummy/other
879 searching for changes
879 searching for changes
880 remote: adding changesets
880 remote: adding changesets
881 remote: adding manifests
881 remote: adding manifests
882 remote: adding file changes
882 remote: adding file changes
883 remote: added 1 changesets with 1 changes to 1 files
883 remote: added 1 changesets with 1 changes to 1 files
884 remote: transaction abort!
884 remote: transaction abort!
885 remote: Cleaning up the mess...
885 remote: Cleaning up the mess...
886 remote: rollback completed
886 remote: rollback completed
887 remote: pushkey: lock state after "phases"
887 remote: pushkey: lock state after "phases"
888 remote: lock: free
888 remote: lock: free
889 remote: wlock: free
889 remote: wlock: free
890 abort: Clown phase push failed
890 abort: Clown phase push failed
891 [255]
891 [255]
892 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
892 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
893 pushing to http://localhost:$HGPORT2/
893 pushing to http://localhost:$HGPORT2/
894 searching for changes
894 searching for changes
895 remote: adding changesets
895 remote: adding changesets
896 remote: adding manifests
896 remote: adding manifests
897 remote: adding file changes
897 remote: adding file changes
898 remote: added 1 changesets with 1 changes to 1 files
898 remote: added 1 changesets with 1 changes to 1 files
899 remote: transaction abort!
899 remote: transaction abort!
900 remote: Cleaning up the mess...
900 remote: Cleaning up the mess...
901 remote: rollback completed
901 remote: rollback completed
902 remote: pushkey: lock state after "phases"
902 remote: pushkey: lock state after "phases"
903 remote: lock: free
903 remote: lock: free
904 remote: wlock: free
904 remote: wlock: free
905 abort: Clown phase push failed
905 abort: Clown phase push failed
906 [255]
906 [255]
907
907
908 Test lazily acquiring the lock during unbundle
908 Test lazily acquiring the lock during unbundle
909 $ cp $TESTTMP/hgrc.orig $HGRCPATH
909 $ cp $TESTTMP/hgrc.orig $HGRCPATH
910 $ cat >> $HGRCPATH <<EOF
910 $ cat >> $HGRCPATH <<EOF
911 > [ui]
911 > [ui]
912 > ssh=python "$TESTDIR/dummyssh"
912 > ssh=python "$TESTDIR/dummyssh"
913 > EOF
913 > EOF
914
914
915 $ cat >> $TESTTMP/locktester.py <<EOF
915 $ cat >> $TESTTMP/locktester.py <<EOF
916 > import os
916 > import os
917 > from mercurial import extensions, bundle2, util
917 > from mercurial import extensions, bundle2, util
918 > def checklock(orig, repo, *args, **kwargs):
918 > def checklock(orig, repo, *args, **kwargs):
919 > if repo.svfs.lexists("lock"):
919 > if repo.svfs.lexists("lock"):
920 > raise util.Abort("Lock should not be taken")
920 > raise util.Abort("Lock should not be taken")
921 > return orig(repo, *args, **kwargs)
921 > return orig(repo, *args, **kwargs)
922 > def extsetup(ui):
922 > def extsetup(ui):
923 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
923 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
924 > EOF
924 > EOF
925
925
926 $ hg init lazylock
926 $ hg init lazylock
927 $ cat >> lazylock/.hg/hgrc <<EOF
927 $ cat >> lazylock/.hg/hgrc <<EOF
928 > [extensions]
928 > [extensions]
929 > locktester=$TESTTMP/locktester.py
929 > locktester=$TESTTMP/locktester.py
930 > EOF
930 > EOF
931
931
932 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
932 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
933 $ cd lazylockclient
933 $ cd lazylockclient
934 $ touch a && hg ci -Aqm a
934 $ touch a && hg ci -Aqm a
935 $ hg push
935 $ hg push
936 pushing to ssh://user@dummy/lazylock
936 pushing to ssh://user@dummy/lazylock
937 searching for changes
937 searching for changes
938 remote: Lock should not be taken
938 remote: Lock should not be taken
939 abort: push failed on remote
939 abort: push failed on remote
940 [255]
940 [255]
941
941
942 $ cat >> ../lazylock/.hg/hgrc <<EOF
942 $ cat >> ../lazylock/.hg/hgrc <<EOF
943 > [experimental]
943 > [experimental]
944 > bundle2lazylocking=True
944 > bundle2lazylocking=True
945 > EOF
945 > EOF
946 $ hg push
946 $ hg push
947 pushing to ssh://user@dummy/lazylock
947 pushing to ssh://user@dummy/lazylock
948 searching for changes
948 searching for changes
949 remote: adding changesets
949 remote: adding changesets
950 remote: adding manifests
950 remote: adding manifests
951 remote: adding file changes
951 remote: adding file changes
952 remote: added 1 changesets with 1 changes to 1 files
952 remote: added 1 changesets with 1 changes to 1 files
953
953
954 $ cd ..
954 $ cd ..
955
955
956 Servers can disable bundle1 for clone/pull operations
956 Servers can disable bundle1 for clone/pull operations
957
957
958 $ killdaemons.py
958 $ killdaemons.py
959 $ hg init bundle2onlyserver
959 $ hg init bundle2onlyserver
960 $ cd bundle2onlyserver
960 $ cd bundle2onlyserver
961 $ cat > .hg/hgrc << EOF
961 $ cat > .hg/hgrc << EOF
962 > [server]
962 > [server]
963 > bundle1.pull = false
963 > bundle1.pull = false
964 > EOF
964 > EOF
965
965
966 $ touch foo
966 $ touch foo
967 $ hg -q commit -A -m initial
967 $ hg -q commit -A -m initial
968
968
969 $ hg serve -p $HGPORT -d --pid-file=hg.pid
969 $ hg serve -p $HGPORT -d --pid-file=hg.pid
970 $ cat hg.pid >> $DAEMON_PIDS
970 $ cat hg.pid >> $DAEMON_PIDS
971
971
972 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
972 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
973 requesting all changes
973 requesting all changes
974 abort: remote error:
974 abort: remote error:
975 incompatible Mercurial client; bundle2 required
975 incompatible Mercurial client; bundle2 required
976 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
976 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
977 [255]
977 [255]
978 $ killdaemons.py
978 $ killdaemons.py
979 $ cd ..
979 $ cd ..
980
980
981 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled
981 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled
982
982
983 $ hg --config format.usegeneraldelta=false init notgdserver
983 $ hg --config format.usegeneraldelta=false init notgdserver
984 $ cd notgdserver
984 $ cd notgdserver
985 $ cat > .hg/hgrc << EOF
985 $ cat > .hg/hgrc << EOF
986 > [server]
986 > [server]
987 > bundle1gd.pull = false
987 > bundle1gd.pull = false
988 > EOF
988 > EOF
989
989
990 $ touch foo
990 $ touch foo
991 $ hg -q commit -A -m initial
991 $ hg -q commit -A -m initial
992 $ hg serve -p $HGPORT -d --pid-file=hg.pid
992 $ hg serve -p $HGPORT -d --pid-file=hg.pid
993 $ cat hg.pid >> $DAEMON_PIDS
993 $ cat hg.pid >> $DAEMON_PIDS
994
994
995 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-1
995 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-1
996 requesting all changes
996 requesting all changes
997 adding changesets
997 adding changesets
998 adding manifests
998 adding manifests
999 adding file changes
999 adding file changes
1000 added 1 changesets with 1 changes to 1 files
1000 added 1 changesets with 1 changes to 1 files
1001 updating to branch default
1001 updating to branch default
1002 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1002 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1003
1003
1004 $ killdaemons.py
1004 $ killdaemons.py
1005 $ cd ../bundle2onlyserver
1005 $ cd ../bundle2onlyserver
1006
1006
1007 bundle1 pull can be disabled for generaldelta repos only
1007 bundle1 pull can be disabled for generaldelta repos only
1008
1008
1009 $ cat > .hg/hgrc << EOF
1009 $ cat > .hg/hgrc << EOF
1010 > [server]
1010 > [server]
1011 > bundle1gd.pull = false
1011 > bundle1gd.pull = false
1012 > EOF
1012 > EOF
1013
1013
1014 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1014 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1015 $ cat hg.pid >> $DAEMON_PIDS
1015 $ cat hg.pid >> $DAEMON_PIDS
1016 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1016 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1017 requesting all changes
1017 requesting all changes
1018 abort: remote error:
1018 abort: remote error:
1019 incompatible Mercurial client; bundle2 required
1019 incompatible Mercurial client; bundle2 required
1020 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1020 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1021 [255]
1021 [255]
1022
1022
1023 $ killdaemons.py
1023 $ killdaemons.py
1024
1024
1025 Verify the global server.bundle1 option works
1025 Verify the global server.bundle1 option works
1026
1026
1027 $ cat > .hg/hgrc << EOF
1027 $ cat > .hg/hgrc << EOF
1028 > [server]
1028 > [server]
1029 > bundle1 = false
1029 > bundle1 = false
1030 > EOF
1030 > EOF
1031 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1031 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1032 $ cat hg.pid >> $DAEMON_PIDS
1032 $ cat hg.pid >> $DAEMON_PIDS
1033 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT not-bundle2
1033 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT not-bundle2
1034 requesting all changes
1034 requesting all changes
1035 abort: remote error:
1035 abort: remote error:
1036 incompatible Mercurial client; bundle2 required
1036 incompatible Mercurial client; bundle2 required
1037 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1037 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1038 [255]
1038 [255]
1039 $ killdaemons.py
1039 $ killdaemons.py
1040
1040
1041 $ cat > .hg/hgrc << EOF
1041 $ cat > .hg/hgrc << EOF
1042 > [server]
1042 > [server]
1043 > bundle1gd = false
1043 > bundle1gd = false
1044 > EOF
1044 > EOF
1045 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1045 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1046 $ cat hg.pid >> $DAEMON_PIDS
1046 $ cat hg.pid >> $DAEMON_PIDS
1047
1047
1048 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1048 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1049 requesting all changes
1049 requesting all changes
1050 abort: remote error:
1050 abort: remote error:
1051 incompatible Mercurial client; bundle2 required
1051 incompatible Mercurial client; bundle2 required
1052 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1052 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1053 [255]
1053 [255]
1054
1054
1055 $ killdaemons.py
1055 $ killdaemons.py
1056
1056
1057 $ cd ../notgdserver
1057 $ cd ../notgdserver
1058 $ cat > .hg/hgrc << EOF
1058 $ cat > .hg/hgrc << EOF
1059 > [server]
1059 > [server]
1060 > bundle1gd = false
1060 > bundle1gd = false
1061 > EOF
1061 > EOF
1062 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1062 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1063 $ cat hg.pid >> $DAEMON_PIDS
1063 $ cat hg.pid >> $DAEMON_PIDS
1064
1064
1065 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-2
1065 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-2
1066 requesting all changes
1066 requesting all changes
1067 adding changesets
1067 adding changesets
1068 adding manifests
1068 adding manifests
1069 adding file changes
1069 adding file changes
1070 added 1 changesets with 1 changes to 1 files
1070 added 1 changesets with 1 changes to 1 files
1071 updating to branch default
1071 updating to branch default
1072 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1072 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1073
1073
1074 $ killdaemons.py
1074 $ killdaemons.py
1075 $ cd ../bundle2onlyserver
1075 $ cd ../bundle2onlyserver
1076
1076
1077 Verify bundle1 pushes can be disabled
1077 Verify bundle1 pushes can be disabled
1078
1078
1079 $ cat > .hg/hgrc << EOF
1079 $ cat > .hg/hgrc << EOF
1080 > [server]
1080 > [server]
1081 > bundle1.push = false
1081 > bundle1.push = false
1082 > [web]
1082 > [web]
1083 > allow_push = *
1083 > allow_push = *
1084 > push_ssl = false
1084 > push_ssl = false
1085 > EOF
1085 > EOF
1086
1086
1087 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1087 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1088 $ cat hg.pid >> $DAEMON_PIDS
1088 $ cat hg.pid >> $DAEMON_PIDS
1089 $ cd ..
1089 $ cd ..
1090
1090
1091 $ hg clone http://localhost:$HGPORT bundle2-only
1091 $ hg clone http://localhost:$HGPORT bundle2-only
1092 requesting all changes
1092 requesting all changes
1093 adding changesets
1093 adding changesets
1094 adding manifests
1094 adding manifests
1095 adding file changes
1095 adding file changes
1096 added 1 changesets with 1 changes to 1 files
1096 added 1 changesets with 1 changes to 1 files
1097 updating to branch default
1097 updating to branch default
1098 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1098 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1099 $ cd bundle2-only
1099 $ cd bundle2-only
1100 $ echo commit > foo
1100 $ echo commit > foo
1101 $ hg commit -m commit
1101 $ hg commit -m commit
1102 $ hg --config devel.legacy.exchange=bundle1 push
1102 $ hg --config devel.legacy.exchange=bundle1 push
1103 pushing to http://localhost:$HGPORT/
1103 pushing to http://localhost:$HGPORT/
1104 searching for changes
1104 searching for changes
1105 abort: remote error:
1105 abort: remote error:
1106 incompatible Mercurial client; bundle2 required
1106 incompatible Mercurial client; bundle2 required
1107 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1107 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1108 [255]
1108 [255]
1109
1109
1110 $ hg push
1110 $ hg push
1111 pushing to http://localhost:$HGPORT/
1111 pushing to http://localhost:$HGPORT/
1112 searching for changes
1112 searching for changes
1113 remote: adding changesets
1113 remote: adding changesets
1114 remote: adding manifests
1114 remote: adding manifests
1115 remote: adding file changes
1115 remote: adding file changes
1116 remote: added 1 changesets with 1 changes to 1 files
1116 remote: added 1 changesets with 1 changes to 1 files
General Comments 0
You need to be logged in to leave comments. Login now