##// END OF EJS Templates
discovery: avoid dropping remote heads hidden locally...
Boris Feld -
r34318:e45ec589 default
parent child Browse files
Show More
@@ -1,2013 +1,2008 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 url as urlmod,
32 url as urlmod,
33 util,
33 util,
34 )
34 )
35
35
36 urlerr = util.urlerr
36 urlerr = util.urlerr
37 urlreq = util.urlreq
37 urlreq = util.urlreq
38
38
39 # Maps bundle version human names to changegroup versions.
39 # Maps bundle version human names to changegroup versions.
40 _bundlespeccgversions = {'v1': '01',
40 _bundlespeccgversions = {'v1': '01',
41 'v2': '02',
41 'v2': '02',
42 'packed1': 's1',
42 'packed1': 's1',
43 'bundle2': '02', #legacy
43 'bundle2': '02', #legacy
44 }
44 }
45
45
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48
48
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into (version, {key: value}).
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-qualified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate human-centric names to internal identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170
170
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle read from ``fh``.

    ``fname`` is used for error reporting (``"stream"`` when empty); when
    ``vfs`` is given, the name is joined against it.  Dispatches on the
    4-byte magic header to a cg1 unpacker, a bundle2 unbundler, or a
    stream-clone applier.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream starting with NUL is a raw uncompressed
        # changegroup: re-attach the consumed bytes and treat it as HG10UN.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # The compression algorithm follows the HG10 magic.
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198
198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal bundle compression type to its spec name,
        # or None when unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Find the changegroup part to learn the changegroup version.
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
251
251
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if common:
        # Drop common nodes the local changelog doesn't actually have.
        hasnode = changelog.hasnode
        common = [node for node in common if hasnode(node)]
    else:
        common = [nullid]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, common, heads)
270
270
271 def _forcebundle1(op):
271 def _forcebundle1(op):
272 """return true if a pull/push must use bundle1
272 """return true if a pull/push must use bundle1
273
273
274 This function is used to allow testing of the older bundle version"""
274 This function is used to allow testing of the older bundle version"""
275 ui = op.repo.ui
275 ui = op.repo.ui
276 forcebundle1 = False
276 forcebundle1 = False
277 # The goal is this config is to allow developer to choose the bundle
277 # The goal is this config is to allow developer to choose the bundle
278 # version used during exchanged. This is especially handy during test.
278 # version used during exchanged. This is especially handy during test.
279 # Value is a list of bundle version to be picked from, highest version
279 # Value is a list of bundle version to be picked from, highest version
280 # should be used.
280 # should be used.
281 #
281 #
282 # developer config: devel.legacy.exchange
282 # developer config: devel.legacy.exchange
283 exchange = ui.configlist('devel', 'legacy.exchange')
283 exchange = ui.configlist('devel', 'legacy.exchange')
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 return forcebundle1 or not op.remote.capable('bundle2')
285 return forcebundle1 or not op.remote.capable('bundle2')
286
286
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nodemap = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nodemap[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(ctx.node() for ctx in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        # On success the pushed heads become common; on failure fall back
        # to the heads that were already shared.
        if self.cgresult:
            return self.futureheads
        return self.fallbackheads
402
402
# mapping of message used when pushing bookmark
# Each action maps to a (success message, failure message) pair.
bookmsgmap = {
    'update': (_("updating bookmark %s\n"),
               _('updating bookmark %s failed!\n')),
    'export': (_("exporting bookmark %s\n"),
               _('exporting bookmark %s failed!\n')),
    'delete': (_("deleting remote bookmark %s\n"),
               _('deleting remote bookmark %s failed!\n')),
}
411
411
412
412
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    localremote = pushop.remote.local()
    if localremote:
        # Local destination: fail early when it lacks required features.
        missing = set(pushop.repo.requirements) - localremote.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # Each of wlock/lock/trmanager may be None; substitute a no-op
    # context manager so the with-statement stays uniform.
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
477
477
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def dec(func):
        # Refuse double registration of the same step name.
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return dec
501
501
def _pushdiscovery(pushop):
    """Run all discovery steps registered in ``pushdiscoveryorder``

    Steps run in registration order; each receives the push operation and
    records its findings on it.
    """
    for stepname in pushdiscoveryorder:
        step = pushdiscoverymapping[stepname]
        step(pushop)

@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    fci = discovery.findcommonincoming
    commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    # record discovery results on the push operation for later steps
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc

@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback

@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """discover which obsolescence markers are relevant to the push

    Only runs when obsmarker exchange is enabled and the remote advertises
    the 'obsolete' namespace; results are stored on ``pushop.outobsmarkers``.
    """
    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
        and pushop.repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        repo = pushop.repo
        # very naive computation, that can be quite expensive on big repo.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)

@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and schedule updates on pushop

    Scheduled changes are appended to ``pushop.outbookmarks`` as
    ``(name, old-remote-hex, new-hex)`` triples; an empty old value means
    "create" and an empty new value means "delete".
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # bookmark comparison entries may carry None for a missing side
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()

def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before pushing

    Returns False when there is nothing to push. Unless ``--force`` was
    given, aborts when an obsolete or unstable changeset would be pushed,
    then runs the head checks via ``discovery.checkheads``.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True

# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def dec(func):
        # each step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            # idx lets callers control ordering relative to existing steps
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec

708 def _pushb2ctxcheckheads(pushop, bundler):
708 def _pushb2ctxcheckheads(pushop, bundler):
709 """Generate race condition checking parts
709 """Generate race condition checking parts
710
710
711 Exists as an independent function to aid extensions
711 Exists as an independent function to aid extensions
712 """
712 """
713 # * 'force' do not check for push race,
713 # * 'force' do not check for push race,
714 # * if we don't push anything, there are nothing to check.
714 # * if we don't push anything, there are nothing to check.
715 if not pushop.force and pushop.outgoing.missingheads:
715 if not pushop.force and pushop.outgoing.missingheads:
716 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
716 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
717 emptyremote = pushop.pushbranchmap is None
717 emptyremote = pushop.pushbranchmap is None
718 if not allowunrelated or emptyremote:
718 if not allowunrelated or emptyremote:
719 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
719 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
720 else:
720 else:
721 affected = set()
721 affected = set()
722 for branch, heads in pushop.pushbranchmap.iteritems():
722 for branch, heads in pushop.pushbranchmap.iteritems():
723 remoteheads, newheads, unsyncedheads, discardedheads = heads
723 remoteheads, newheads, unsyncedheads, discardedheads = heads
724 if remoteheads is not None:
724 if remoteheads is not None:
725 remote = set(remoteheads)
725 remote = set(remoteheads)
726 affected |= set(discardedheads) & remote
726 affected |= set(discardedheads) & remote
727 affected |= remote - set(newheads)
727 affected |= remote - set(newheads)
728 if affected:
728 if affected:
729 data = iter(sorted(affected))
729 data = iter(sorted(affected))
730 bundler.newpart('check:updated-heads', data=data)
730 bundler.newpart('check:updated-heads', data=data)
731
731
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply

@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        # one pushkey part per head to turn public on the remote
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply

@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """handle obsolescence marker push through bundle2

    Skipped when no obsmarker format version is common with the remote.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        bundle2.buildobsmarkerspart(bundler, markers)

@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        # one pushkey part per bookmark change
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply

@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2

    Each entry of ``pushop.pushvars`` must look like ``KEY=VALUE`` (VALUE
    may be empty); anything else aborts the push.
    '''
    pushvars = pushop.pushvars
    if pushvars:
        shellvars = {}
        for raw in pushvars:
            if '=' not in raw:
                msg = ("unable to parse variable '%s', should follow "
                       "'KEY=VALUE' or 'KEY=' format")
                raise error.Abort(msg % raw)
            k, v = raw.split('=', 1)
            shellvars[k] = v

        part = bundler.newpart('pushvars')

        for key, value in shellvars.iteritems():
            # mandatory=False: servers unaware of pushvars can ignore them
            part.addparam(key, value, mandatory=False)

def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        # dispatch the failure to the part-specific callback when one exists
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)

def _pushchangeset(pushop):
    """Push the outgoing changeset bundle itself to the remote repository.

    Records the remote's return code in ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # push() is expected to have verified this capability already.
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # Build a changegroup from the local repository.
    clog = pushop.repo.changelog
    if pushop.revs is None and not (outgoing.excluded or clog.filteredrevs):
        # Pushing everything from an unexcluded, unfiltered repo: no push
        # race is possible, so the fast path can be used.
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # Apply the changegroup on the remote. The local repo found the server's
    # heads and computed which revs to push; once the revs are transferred,
    # the server aborts if it sees different heads (someone else won a
    # commit/push race).
    remoteheads = ['force'] if pushop.force else pushop.remoteheads
    # ssh: returns remote's addchangegroup()
    # http: returns remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
984
984
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Pulls the remote's phase data, applies the relevant parts locally, then
    pushes outdated phase updates back via the legacy pushkey protocol
    (unless bundle2 already handled phases).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # remote is publishing: everything common is public locally too
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1040
1040
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # No transaction manager means the repo is not locked: do not touch any
    # phases, just inform the user that a move would have happened.
    wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
    if wouldmove:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1057
1057
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remote = pushop.remote
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        pushop.repo.ui.warn(_('failed to push some obsolete markers!\n'))
1076
1076
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        # changegroup push failed, or bookmarks already handled (bundle2)
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # pick the message set matching what this update actually does
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1098
1098
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            # append remote heads that are not already in common
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # bundle2 is usable unless configuration or capabilities force
        # the legacy bundle1 exchange
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1169
1169
class transactionmanager(util.transactional):
    """Manage the life cycle of a pull/unbundle transaction.

    The transaction is created lazily, on first use, and the appropriate
    hooks are invoked when it is closed."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            # expose origin information to hooks
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1199
1199
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.

    Raises ``error.Abort`` when the remote requires repository features the
    local repository does not support.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)

    # refuse to pull from a local peer whose requirements we cannot honor
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        # wlock is taken before lock, matching the acquisition order used
        # elsewhere in the codebase
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each _pull* step is a no-op if bundle2 already recorded it in
        # pullop.stepsdone
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1257
1257
# list of steps to perform discovery before pull, in execution order
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1265
1265
def pulldiscovery(stepname):
    """Decorator registering a pre-pull discovery step under ``stepname``.

    The decorated function is stored in the step -> function mapping and
    ``stepname`` is appended to the ordered step list, so decoration order
    determines execution order (this may matter).

    Only use this decorator for a brand new step; to wrap an existing step
    from an extension, modify the pulldiscoverymapping dictionary directly."""
    def register(func):
        # a step name must be registered exactly once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1281
1281
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
1287
1287
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """Fetch bookmark data for the bundle1 case.

    When bundle2 is not in use, bookmarks have to be fetched before changeset
    discovery to reduce the chance and the impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmarks already known (e.g. provided by the caller)
        return
    bundle2willlistkeys = (pullop.canusebundle2
                           and 'listkeys' in pullop.remotebundle2caps)
    if bundle2willlistkeys:
        # bundle2 will carry the bookmark data; all known bundle2 servers now
        # support listkeys, but lets be nice with new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1301
1301
1302
1302
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # nodemap of the *unfiltered* repo, so locally-hidden nodes are visible
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not including a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        # if every remote head is already common, there is nothing to fetch;
        # note that rheads itself is deliberately left untouched
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1340
1335
1341 def _pullbundle2(pullop):
1336 def _pullbundle2(pullop):
1342 """pull data using bundle2
1337 """pull data using bundle2
1343
1338
1344 For now, the only supported data are changegroup."""
1339 For now, the only supported data are changegroup."""
1345 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1340 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1346
1341
1347 # At the moment we don't do stream clones over bundle2. If that is
1342 # At the moment we don't do stream clones over bundle2. If that is
1348 # implemented then here's where the check for that will go.
1343 # implemented then here's where the check for that will go.
1349 streaming = False
1344 streaming = False
1350
1345
1351 # pulling changegroup
1346 # pulling changegroup
1352 pullop.stepsdone.add('changegroup')
1347 pullop.stepsdone.add('changegroup')
1353
1348
1354 kwargs['common'] = pullop.common
1349 kwargs['common'] = pullop.common
1355 kwargs['heads'] = pullop.heads or pullop.rheads
1350 kwargs['heads'] = pullop.heads or pullop.rheads
1356 kwargs['cg'] = pullop.fetch
1351 kwargs['cg'] = pullop.fetch
1357 if 'listkeys' in pullop.remotebundle2caps:
1352 if 'listkeys' in pullop.remotebundle2caps:
1358 kwargs['listkeys'] = ['phases']
1353 kwargs['listkeys'] = ['phases']
1359 if pullop.remotebookmarks is None:
1354 if pullop.remotebookmarks is None:
1360 # make sure to always includes bookmark data when migrating
1355 # make sure to always includes bookmark data when migrating
1361 # `hg incoming --bundle` to using this function.
1356 # `hg incoming --bundle` to using this function.
1362 kwargs['listkeys'].append('bookmarks')
1357 kwargs['listkeys'].append('bookmarks')
1363
1358
1364 # If this is a full pull / clone and the server supports the clone bundles
1359 # If this is a full pull / clone and the server supports the clone bundles
1365 # feature, tell the server whether we attempted a clone bundle. The
1360 # feature, tell the server whether we attempted a clone bundle. The
1366 # presence of this flag indicates the client supports clone bundles. This
1361 # presence of this flag indicates the client supports clone bundles. This
1367 # will enable the server to treat clients that support clone bundles
1362 # will enable the server to treat clients that support clone bundles
1368 # differently from those that don't.
1363 # differently from those that don't.
1369 if (pullop.remote.capable('clonebundles')
1364 if (pullop.remote.capable('clonebundles')
1370 and pullop.heads is None and list(pullop.common) == [nullid]):
1365 and pullop.heads is None and list(pullop.common) == [nullid]):
1371 kwargs['cbattempted'] = pullop.clonebundleattempted
1366 kwargs['cbattempted'] = pullop.clonebundleattempted
1372
1367
1373 if streaming:
1368 if streaming:
1374 pullop.repo.ui.status(_('streaming all changes\n'))
1369 pullop.repo.ui.status(_('streaming all changes\n'))
1375 elif not pullop.fetch:
1370 elif not pullop.fetch:
1376 pullop.repo.ui.status(_("no changes found\n"))
1371 pullop.repo.ui.status(_("no changes found\n"))
1377 pullop.cgresult = 0
1372 pullop.cgresult = 0
1378 else:
1373 else:
1379 if pullop.heads is None and list(pullop.common) == [nullid]:
1374 if pullop.heads is None and list(pullop.common) == [nullid]:
1380 pullop.repo.ui.status(_("requesting all changes\n"))
1375 pullop.repo.ui.status(_("requesting all changes\n"))
1381 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1376 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1382 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1377 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1383 if obsolete.commonversion(remoteversions) is not None:
1378 if obsolete.commonversion(remoteversions) is not None:
1384 kwargs['obsmarkers'] = True
1379 kwargs['obsmarkers'] = True
1385 pullop.stepsdone.add('obsmarkers')
1380 pullop.stepsdone.add('obsmarkers')
1386 _pullbundle2extraprepare(pullop, kwargs)
1381 _pullbundle2extraprepare(pullop, kwargs)
1387 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1382 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1388 try:
1383 try:
1389 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1384 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1390 except bundle2.AbortFromPart as exc:
1385 except bundle2.AbortFromPart as exc:
1391 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1386 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1392 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1387 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1393 except error.BundleValueError as exc:
1388 except error.BundleValueError as exc:
1394 raise error.Abort(_('missing support for %s') % exc)
1389 raise error.Abort(_('missing support for %s') % exc)
1395
1390
1396 if pullop.fetch:
1391 if pullop.fetch:
1397 pullop.cgresult = bundle2.combinechangegroupresults(op)
1392 pullop.cgresult = bundle2.combinechangegroupresults(op)
1398
1393
1399 # If the bundle had a phase-heads part, then phase exchange is already done
1394 # If the bundle had a phase-heads part, then phase exchange is already done
1400 if op.records['phase-heads']:
1395 if op.records['phase-heads']:
1401 pullop.stepsdone.add('phases')
1396 pullop.stepsdone.add('phases')
1402
1397
1403 # processing phases change
1398 # processing phases change
1404 for namespace, value in op.records['listkeys']:
1399 for namespace, value in op.records['listkeys']:
1405 if namespace == 'phases':
1400 if namespace == 'phases':
1406 _pullapplyphases(pullop, value)
1401 _pullapplyphases(pullop, value)
1407
1402
1408 # processing bookmark update
1403 # processing bookmark update
1409 for namespace, value in op.records['listkeys']:
1404 for namespace, value in op.records['listkeys']:
1410 if namespace == 'bookmarks':
1405 if namespace == 'bookmarks':
1411 pullop.remotebookmarks = value
1406 pullop.remotebookmarks = value
1412
1407
1413 # bookmark data were either already there or pulled in the bundle
1408 # bookmark data were either already there or pulled in the bundle
1414 if pullop.remotebookmarks is not None:
1409 if pullop.remotebookmarks is not None:
1415 _pullbookmarks(pullop)
1410 _pullbookmarks(pullop)
1416
1411
1417 def _pullbundle2extraprepare(pullop, kwargs):
1412 def _pullbundle2extraprepare(pullop, kwargs):
1418 """hook function so that extensions can extend the getbundle call"""
1413 """hook function so that extensions can extend the getbundle call"""
1419 pass
1414 pass
1420
1415
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # Open the transaction as late as possible: opening it for nothing
    # would break a future useful rollback call.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1455
1450
def _pullphase(pullop):
    """Fetch remote phase data and apply it locally (legacy pull path)."""
    if 'phases' in pullop.stepsdone:
        return
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)
1462
1457
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing: all common changesets
        # should be seen as public.
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1497
1492
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1510
1505
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # dump keys hold base85-encoded obsstore payloads
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
1538
1533
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1545
1540
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1553
1548
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        # Insert at an explicit position when requested, otherwise append.
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec
1572
1567
def bundle2requested(bundlecaps):
    """Tell whether *bundlecaps* advertises bundle2 support (any 'HG2*' cap)."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1577
1572
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        return changegroup.makestream(repo, outgoing, '01', source,
                                      bundlecaps=bundlecaps)

    # bundle20 case
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # Run every registered part generator in declaration order.
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1618
1613
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cgstream = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        if outgoing.missing:
            cgstream = changegroup.makestream(repo, outgoing, version, source,
                                              bundlecaps=bundlecaps)

    if cgstream:
        part = bundler.newpart('changegroup', data=cgstream)
        if cgversions:
            part.addparam('version', version)
        part.addparam('nbchanges', '%d' % len(outgoing.missing),
                      mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1647
1642
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
1658
1653
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if kwargs.get('obsmarkers', False):
        if heads is None:
            heads = repo.heads()
        # Only markers relevant to the transferred ancestry are sent.
        subset = [c.node() for c in repo.set('::%ln', heads)]
        markers = sorted(repo.obsstore.relevantmarkers(subset))
        bundle2.buildobsmarkerspart(bundler, markers)
1670
1665
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless:
    # - changeset are being exchanged,
    # - the client supports it.
    if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1690
1685
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    This function is primarily used to generate `bookmarks` bundle2 part.
    It is a separate function in order to make it easy to wrap it
    in extensions. Passing `kwargs` to the function makes it easy to
    add new parameters in extensions.
    """
    return dict(bookmod.listbinbookmarks(repo))
1701
1696
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    # Accept an explicit 'force', the exact head list, or its sha1 digest.
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1715
1710
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                def gettransaction():
                    # Lazily take locks and open the transaction on first use.
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1788
1783
1789 def _maybeapplyclonebundle(pullop):
1784 def _maybeapplyclonebundle(pullop):
1790 """Apply a clone bundle from a remote, if possible."""
1785 """Apply a clone bundle from a remote, if possible."""
1791
1786
1792 repo = pullop.repo
1787 repo = pullop.repo
1793 remote = pullop.remote
1788 remote = pullop.remote
1794
1789
1795 if not repo.ui.configbool('ui', 'clonebundles'):
1790 if not repo.ui.configbool('ui', 'clonebundles'):
1796 return
1791 return
1797
1792
1798 # Only run if local repo is empty.
1793 # Only run if local repo is empty.
1799 if len(repo):
1794 if len(repo):
1800 return
1795 return
1801
1796
1802 if pullop.heads:
1797 if pullop.heads:
1803 return
1798 return
1804
1799
1805 if not remote.capable('clonebundles'):
1800 if not remote.capable('clonebundles'):
1806 return
1801 return
1807
1802
1808 res = remote._call('clonebundles')
1803 res = remote._call('clonebundles')
1809
1804
1810 # If we call the wire protocol command, that's good enough to record the
1805 # If we call the wire protocol command, that's good enough to record the
1811 # attempt.
1806 # attempt.
1812 pullop.clonebundleattempted = True
1807 pullop.clonebundleattempted = True
1813
1808
1814 entries = parseclonebundlesmanifest(repo, res)
1809 entries = parseclonebundlesmanifest(repo, res)
1815 if not entries:
1810 if not entries:
1816 repo.ui.note(_('no clone bundles available on remote; '
1811 repo.ui.note(_('no clone bundles available on remote; '
1817 'falling back to regular clone\n'))
1812 'falling back to regular clone\n'))
1818 return
1813 return
1819
1814
1820 entries = filterclonebundleentries(repo, entries)
1815 entries = filterclonebundleentries(repo, entries)
1821 if not entries:
1816 if not entries:
1822 # There is a thundering herd concern here. However, if a server
1817 # There is a thundering herd concern here. However, if a server
1823 # operator doesn't advertise bundles appropriate for its clients,
1818 # operator doesn't advertise bundles appropriate for its clients,
1824 # they deserve what's coming. Furthermore, from a client's
1819 # they deserve what's coming. Furthermore, from a client's
1825 # perspective, no automatic fallback would mean not being able to
1820 # perspective, no automatic fallback would mean not being able to
1826 # clone!
1821 # clone!
1827 repo.ui.warn(_('no compatible clone bundles available on server; '
1822 repo.ui.warn(_('no compatible clone bundles available on server; '
1828 'falling back to regular clone\n'))
1823 'falling back to regular clone\n'))
1829 repo.ui.warn(_('(you may want to report this to the server '
1824 repo.ui.warn(_('(you may want to report this to the server '
1830 'operator)\n'))
1825 'operator)\n'))
1831 return
1826 return
1832
1827
1833 entries = sortclonebundleentries(repo.ui, entries)
1828 entries = sortclonebundleentries(repo.ui, entries)
1834
1829
1835 url = entries[0]['URL']
1830 url = entries[0]['URL']
1836 repo.ui.status(_('applying clone bundle from %s\n') % url)
1831 repo.ui.status(_('applying clone bundle from %s\n') % url)
1837 if trypullbundlefromurl(repo.ui, repo, url):
1832 if trypullbundlefromurl(repo.ui, repo, url):
1838 repo.ui.status(_('finished applying clone bundle\n'))
1833 repo.ui.status(_('finished applying clone bundle\n'))
1839 # Bundle failed.
1834 # Bundle failed.
1840 #
1835 #
1841 # We abort by default to avoid the thundering herd of
1836 # We abort by default to avoid the thundering herd of
1842 # clients flooding a server that was expecting expensive
1837 # clients flooding a server that was expecting expensive
1843 # clone load to be offloaded.
1838 # clone load to be offloaded.
1844 elif repo.ui.configbool('ui', 'clonebundlefallback'):
1839 elif repo.ui.configbool('ui', 'clonebundlefallback'):
1845 repo.ui.warn(_('falling back to normal clone\n'))
1840 repo.ui.warn(_('falling back to normal clone\n'))
1846 else:
1841 else:
1847 raise error.Abort(_('error applying bundle'),
1842 raise error.Abort(_('error applying bundle'),
1848 hint=_('if this error persists, consider contacting '
1843 hint=_('if this error persists, consider contacting '
1849 'the server operator or disable clone '
1844 'the server operator or disable clone '
1850 'bundles via '
1845 'bundles via '
1851 '"--config ui.clonebundles=false"'))
1846 '"--config ui.clonebundles=false"'))
1852
1847
1853 def parseclonebundlesmanifest(repo, s):
1848 def parseclonebundlesmanifest(repo, s):
1854 """Parses the raw text of a clone bundles manifest.
1849 """Parses the raw text of a clone bundles manifest.
1855
1850
1856 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1851 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1857 to the URL and other keys are the attributes for the entry.
1852 to the URL and other keys are the attributes for the entry.
1858 """
1853 """
1859 m = []
1854 m = []
1860 for line in s.splitlines():
1855 for line in s.splitlines():
1861 fields = line.split()
1856 fields = line.split()
1862 if not fields:
1857 if not fields:
1863 continue
1858 continue
1864 attrs = {'URL': fields[0]}
1859 attrs = {'URL': fields[0]}
1865 for rawattr in fields[1:]:
1860 for rawattr in fields[1:]:
1866 key, value = rawattr.split('=', 1)
1861 key, value = rawattr.split('=', 1)
1867 key = urlreq.unquote(key)
1862 key = urlreq.unquote(key)
1868 value = urlreq.unquote(value)
1863 value = urlreq.unquote(value)
1869 attrs[key] = value
1864 attrs[key] = value
1870
1865
1871 # Parse BUNDLESPEC into components. This makes client-side
1866 # Parse BUNDLESPEC into components. This makes client-side
1872 # preferences easier to specify since you can prefer a single
1867 # preferences easier to specify since you can prefer a single
1873 # component of the BUNDLESPEC.
1868 # component of the BUNDLESPEC.
1874 if key == 'BUNDLESPEC':
1869 if key == 'BUNDLESPEC':
1875 try:
1870 try:
1876 comp, version, params = parsebundlespec(repo, value,
1871 comp, version, params = parsebundlespec(repo, value,
1877 externalnames=True)
1872 externalnames=True)
1878 attrs['COMPRESSION'] = comp
1873 attrs['COMPRESSION'] = comp
1879 attrs['VERSION'] = version
1874 attrs['VERSION'] = version
1880 except error.InvalidBundleSpecification:
1875 except error.InvalidBundleSpecification:
1881 pass
1876 pass
1882 except error.UnsupportedBundleSpecification:
1877 except error.UnsupportedBundleSpecification:
1883 pass
1878 pass
1884
1879
1885 m.append(attrs)
1880 m.append(attrs)
1886
1881
1887 return m
1882 return m
1888
1883
1889 def filterclonebundleentries(repo, entries):
1884 def filterclonebundleentries(repo, entries):
1890 """Remove incompatible clone bundle manifest entries.
1885 """Remove incompatible clone bundle manifest entries.
1891
1886
1892 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1887 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1893 and returns a new list consisting of only the entries that this client
1888 and returns a new list consisting of only the entries that this client
1894 should be able to apply.
1889 should be able to apply.
1895
1890
1896 There is no guarantee we'll be able to apply all returned entries because
1891 There is no guarantee we'll be able to apply all returned entries because
1897 the metadata we use to filter on may be missing or wrong.
1892 the metadata we use to filter on may be missing or wrong.
1898 """
1893 """
1899 newentries = []
1894 newentries = []
1900 for entry in entries:
1895 for entry in entries:
1901 spec = entry.get('BUNDLESPEC')
1896 spec = entry.get('BUNDLESPEC')
1902 if spec:
1897 if spec:
1903 try:
1898 try:
1904 parsebundlespec(repo, spec, strict=True)
1899 parsebundlespec(repo, spec, strict=True)
1905 except error.InvalidBundleSpecification as e:
1900 except error.InvalidBundleSpecification as e:
1906 repo.ui.debug(str(e) + '\n')
1901 repo.ui.debug(str(e) + '\n')
1907 continue
1902 continue
1908 except error.UnsupportedBundleSpecification as e:
1903 except error.UnsupportedBundleSpecification as e:
1909 repo.ui.debug('filtering %s because unsupported bundle '
1904 repo.ui.debug('filtering %s because unsupported bundle '
1910 'spec: %s\n' % (entry['URL'], str(e)))
1905 'spec: %s\n' % (entry['URL'], str(e)))
1911 continue
1906 continue
1912
1907
1913 if 'REQUIRESNI' in entry and not sslutil.hassni:
1908 if 'REQUIRESNI' in entry and not sslutil.hassni:
1914 repo.ui.debug('filtering %s because SNI not supported\n' %
1909 repo.ui.debug('filtering %s because SNI not supported\n' %
1915 entry['URL'])
1910 entry['URL'])
1916 continue
1911 continue
1917
1912
1918 newentries.append(entry)
1913 newentries.append(entry)
1919
1914
1920 return newentries
1915 return newentries
1921
1916
1922 class clonebundleentry(object):
1917 class clonebundleentry(object):
1923 """Represents an item in a clone bundles manifest.
1918 """Represents an item in a clone bundles manifest.
1924
1919
1925 This rich class is needed to support sorting since sorted() in Python 3
1920 This rich class is needed to support sorting since sorted() in Python 3
1926 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1921 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1927 won't work.
1922 won't work.
1928 """
1923 """
1929
1924
1930 def __init__(self, value, prefers):
1925 def __init__(self, value, prefers):
1931 self.value = value
1926 self.value = value
1932 self.prefers = prefers
1927 self.prefers = prefers
1933
1928
1934 def _cmp(self, other):
1929 def _cmp(self, other):
1935 for prefkey, prefvalue in self.prefers:
1930 for prefkey, prefvalue in self.prefers:
1936 avalue = self.value.get(prefkey)
1931 avalue = self.value.get(prefkey)
1937 bvalue = other.value.get(prefkey)
1932 bvalue = other.value.get(prefkey)
1938
1933
1939 # Special case for b missing attribute and a matches exactly.
1934 # Special case for b missing attribute and a matches exactly.
1940 if avalue is not None and bvalue is None and avalue == prefvalue:
1935 if avalue is not None and bvalue is None and avalue == prefvalue:
1941 return -1
1936 return -1
1942
1937
1943 # Special case for a missing attribute and b matches exactly.
1938 # Special case for a missing attribute and b matches exactly.
1944 if bvalue is not None and avalue is None and bvalue == prefvalue:
1939 if bvalue is not None and avalue is None and bvalue == prefvalue:
1945 return 1
1940 return 1
1946
1941
1947 # We can't compare unless attribute present on both.
1942 # We can't compare unless attribute present on both.
1948 if avalue is None or bvalue is None:
1943 if avalue is None or bvalue is None:
1949 continue
1944 continue
1950
1945
1951 # Same values should fall back to next attribute.
1946 # Same values should fall back to next attribute.
1952 if avalue == bvalue:
1947 if avalue == bvalue:
1953 continue
1948 continue
1954
1949
1955 # Exact matches come first.
1950 # Exact matches come first.
1956 if avalue == prefvalue:
1951 if avalue == prefvalue:
1957 return -1
1952 return -1
1958 if bvalue == prefvalue:
1953 if bvalue == prefvalue:
1959 return 1
1954 return 1
1960
1955
1961 # Fall back to next attribute.
1956 # Fall back to next attribute.
1962 continue
1957 continue
1963
1958
1964 # If we got here we couldn't sort by attributes and prefers. Fall
1959 # If we got here we couldn't sort by attributes and prefers. Fall
1965 # back to index order.
1960 # back to index order.
1966 return 0
1961 return 0
1967
1962
1968 def __lt__(self, other):
1963 def __lt__(self, other):
1969 return self._cmp(other) < 0
1964 return self._cmp(other) < 0
1970
1965
1971 def __gt__(self, other):
1966 def __gt__(self, other):
1972 return self._cmp(other) > 0
1967 return self._cmp(other) > 0
1973
1968
1974 def __eq__(self, other):
1969 def __eq__(self, other):
1975 return self._cmp(other) == 0
1970 return self._cmp(other) == 0
1976
1971
1977 def __le__(self, other):
1972 def __le__(self, other):
1978 return self._cmp(other) <= 0
1973 return self._cmp(other) <= 0
1979
1974
1980 def __ge__(self, other):
1975 def __ge__(self, other):
1981 return self._cmp(other) >= 0
1976 return self._cmp(other) >= 0
1982
1977
1983 def __ne__(self, other):
1978 def __ne__(self, other):
1984 return self._cmp(other) != 0
1979 return self._cmp(other) != 0
1985
1980
1986 def sortclonebundleentries(ui, entries):
1981 def sortclonebundleentries(ui, entries):
1987 prefers = ui.configlist('ui', 'clonebundleprefers')
1982 prefers = ui.configlist('ui', 'clonebundleprefers')
1988 if not prefers:
1983 if not prefers:
1989 return list(entries)
1984 return list(entries)
1990
1985
1991 prefers = [p.split('=', 1) for p in prefers]
1986 prefers = [p.split('=', 1) for p in prefers]
1992
1987
1993 items = sorted(clonebundleentry(v, prefers) for v in entries)
1988 items = sorted(clonebundleentry(v, prefers) for v in entries)
1994 return [i.value for i in items]
1989 return [i.value for i in items]
1995
1990
1996 def trypullbundlefromurl(ui, repo, url):
1991 def trypullbundlefromurl(ui, repo, url):
1997 """Attempt to apply a bundle from a URL."""
1992 """Attempt to apply a bundle from a URL."""
1998 with repo.lock(), repo.transaction('bundleurl') as tr:
1993 with repo.lock(), repo.transaction('bundleurl') as tr:
1999 try:
1994 try:
2000 fh = urlmod.open(ui, url)
1995 fh = urlmod.open(ui, url)
2001 cg = readbundle(ui, fh, 'stream')
1996 cg = readbundle(ui, fh, 'stream')
2002
1997
2003 if isinstance(cg, streamclone.streamcloneapplier):
1998 if isinstance(cg, streamclone.streamcloneapplier):
2004 cg.apply(repo)
1999 cg.apply(repo)
2005 else:
2000 else:
2006 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2001 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2007 return True
2002 return True
2008 except urlerr.httperror as e:
2003 except urlerr.httperror as e:
2009 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2004 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2010 except urlerr.urlerror as e:
2005 except urlerr.urlerror as e:
2011 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2006 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2012
2007
2013 return False
2008 return False
@@ -1,570 +1,570 b''
1 #require killdaemons
1 #require killdaemons
2
2
3 Tests discovery against servers without getbundle support:
3 Tests discovery against servers without getbundle support:
4
4
5 $ CAP="getbundle bundle2"
5 $ CAP="getbundle bundle2"
6 $ . "$TESTDIR/notcapable"
6 $ . "$TESTDIR/notcapable"
7 $ cat >> $HGRCPATH <<EOF
7 $ cat >> $HGRCPATH <<EOF
8 > [ui]
8 > [ui]
9 > logtemplate="{rev} {node|short}: {desc} {branches}\n"
9 > logtemplate="{rev} {node|short}: {desc} {branches}\n"
10 > EOF
10 > EOF
11
11
12 Setup HTTP server control:
12 Setup HTTP server control:
13
13
14 $ remote=http://localhost:$HGPORT/
14 $ remote=http://localhost:$HGPORT/
15 $ export remote
15 $ export remote
16 $ tstart() {
16 $ tstart() {
17 > echo '[web]' > $1/.hg/hgrc
17 > echo '[web]' > $1/.hg/hgrc
18 > echo 'push_ssl = false' >> $1/.hg/hgrc
18 > echo 'push_ssl = false' >> $1/.hg/hgrc
19 > echo 'allow_push = *' >> $1/.hg/hgrc
19 > echo 'allow_push = *' >> $1/.hg/hgrc
20 > hg serve -R $1 -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
20 > hg serve -R $1 -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
21 > cat hg.pid >> $DAEMON_PIDS
21 > cat hg.pid >> $DAEMON_PIDS
22 > }
22 > }
23 $ tstop() {
23 $ tstop() {
24 > killdaemons.py
24 > killdaemons.py
25 > [ "$1" ] && cut -d' ' -f6- access.log && cat errors.log
25 > [ "$1" ] && cut -d' ' -f6- access.log && cat errors.log
26 > rm access.log errors.log
26 > rm access.log errors.log
27 > }
27 > }
28
28
29 Both are empty:
29 Both are empty:
30
30
31 $ hg init empty1
31 $ hg init empty1
32 $ hg init empty2
32 $ hg init empty2
33 $ tstart empty2
33 $ tstart empty2
34 $ hg incoming -R empty1 $remote
34 $ hg incoming -R empty1 $remote
35 comparing with http://localhost:$HGPORT/
35 comparing with http://localhost:$HGPORT/
36 no changes found
36 no changes found
37 [1]
37 [1]
38 $ hg outgoing -R empty1 $remote
38 $ hg outgoing -R empty1 $remote
39 comparing with http://localhost:$HGPORT/
39 comparing with http://localhost:$HGPORT/
40 no changes found
40 no changes found
41 [1]
41 [1]
42 $ hg pull -R empty1 $remote
42 $ hg pull -R empty1 $remote
43 pulling from http://localhost:$HGPORT/
43 pulling from http://localhost:$HGPORT/
44 no changes found
44 no changes found
45 $ hg push -R empty1 $remote
45 $ hg push -R empty1 $remote
46 pushing to http://localhost:$HGPORT/
46 pushing to http://localhost:$HGPORT/
47 no changes found
47 no changes found
48 [1]
48 [1]
49 $ tstop
49 $ tstop
50
50
51 Base repo:
51 Base repo:
52
52
53 $ hg init main
53 $ hg init main
54 $ cd main
54 $ cd main
55 $ hg debugbuilddag -mo '+2:tbase @name1 +3:thead1 <tbase @name2 +4:thead2 @both /thead1 +2:tmaintip'
55 $ hg debugbuilddag -mo '+2:tbase @name1 +3:thead1 <tbase @name2 +4:thead2 @both /thead1 +2:tmaintip'
56 $ hg log -G
56 $ hg log -G
57 o 11 a19bfa7e7328: r11 both
57 o 11 a19bfa7e7328: r11 both
58 |
58 |
59 o 10 8b6bad1512e1: r10 both
59 o 10 8b6bad1512e1: r10 both
60 |
60 |
61 o 9 025829e08038: r9 both
61 o 9 025829e08038: r9 both
62 |\
62 |\
63 | o 8 d8f638ac69e9: r8 name2
63 | o 8 d8f638ac69e9: r8 name2
64 | |
64 | |
65 | o 7 b6b4d315a2ac: r7 name2
65 | o 7 b6b4d315a2ac: r7 name2
66 | |
66 | |
67 | o 6 6c6f5d5f3c11: r6 name2
67 | o 6 6c6f5d5f3c11: r6 name2
68 | |
68 | |
69 | o 5 70314b29987d: r5 name2
69 | o 5 70314b29987d: r5 name2
70 | |
70 | |
71 o | 4 e71dbbc70e03: r4 name1
71 o | 4 e71dbbc70e03: r4 name1
72 | |
72 | |
73 o | 3 2c8d5d5ec612: r3 name1
73 o | 3 2c8d5d5ec612: r3 name1
74 | |
74 | |
75 o | 2 a7892891da29: r2 name1
75 o | 2 a7892891da29: r2 name1
76 |/
76 |/
77 o 1 0019a3b924fd: r1
77 o 1 0019a3b924fd: r1
78 |
78 |
79 o 0 d57206cc072a: r0
79 o 0 d57206cc072a: r0
80
80
81 $ cd ..
81 $ cd ..
82 $ tstart main
82 $ tstart main
83
83
84 Full clone:
84 Full clone:
85
85
86 $ hg clone main full
86 $ hg clone main full
87 updating to branch default
87 updating to branch default
88 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
89 $ cd full
89 $ cd full
90 $ hg incoming $remote
90 $ hg incoming $remote
91 comparing with http://localhost:$HGPORT/
91 comparing with http://localhost:$HGPORT/
92 searching for changes
92 searching for changes
93 no changes found
93 no changes found
94 [1]
94 [1]
95 $ hg outgoing $remote
95 $ hg outgoing $remote
96 comparing with http://localhost:$HGPORT/
96 comparing with http://localhost:$HGPORT/
97 searching for changes
97 searching for changes
98 no changes found
98 no changes found
99 [1]
99 [1]
100 $ hg pull $remote
100 $ hg pull $remote
101 pulling from http://localhost:$HGPORT/
101 pulling from http://localhost:$HGPORT/
102 searching for changes
102 searching for changes
103 no changes found
103 no changes found
104 $ hg push $remote
104 $ hg push $remote
105 pushing to http://localhost:$HGPORT/
105 pushing to http://localhost:$HGPORT/
106 searching for changes
106 searching for changes
107 no changes found
107 no changes found
108 [1]
108 [1]
109 $ cd ..
109 $ cd ..
110
110
111 Local is empty:
111 Local is empty:
112
112
113 $ cd empty1
113 $ cd empty1
114 $ hg incoming $remote
114 $ hg incoming $remote
115 comparing with http://localhost:$HGPORT/
115 comparing with http://localhost:$HGPORT/
116 0 d57206cc072a: r0
116 0 d57206cc072a: r0
117 1 0019a3b924fd: r1
117 1 0019a3b924fd: r1
118 2 a7892891da29: r2 name1
118 2 a7892891da29: r2 name1
119 3 2c8d5d5ec612: r3 name1
119 3 2c8d5d5ec612: r3 name1
120 4 e71dbbc70e03: r4 name1
120 4 e71dbbc70e03: r4 name1
121 5 70314b29987d: r5 name2
121 5 70314b29987d: r5 name2
122 6 6c6f5d5f3c11: r6 name2
122 6 6c6f5d5f3c11: r6 name2
123 7 b6b4d315a2ac: r7 name2
123 7 b6b4d315a2ac: r7 name2
124 8 d8f638ac69e9: r8 name2
124 8 d8f638ac69e9: r8 name2
125 9 025829e08038: r9 both
125 9 025829e08038: r9 both
126 10 8b6bad1512e1: r10 both
126 10 8b6bad1512e1: r10 both
127 11 a19bfa7e7328: r11 both
127 11 a19bfa7e7328: r11 both
128 $ hg outgoing $remote
128 $ hg outgoing $remote
129 comparing with http://localhost:$HGPORT/
129 comparing with http://localhost:$HGPORT/
130 no changes found
130 no changes found
131 [1]
131 [1]
132 $ hg push $remote
132 $ hg push $remote
133 pushing to http://localhost:$HGPORT/
133 pushing to http://localhost:$HGPORT/
134 no changes found
134 no changes found
135 [1]
135 [1]
136 $ hg pull $remote
136 $ hg pull $remote
137 pulling from http://localhost:$HGPORT/
137 pulling from http://localhost:$HGPORT/
138 requesting all changes
138 requesting all changes
139 adding changesets
139 adding changesets
140 adding manifests
140 adding manifests
141 adding file changes
141 adding file changes
142 added 12 changesets with 24 changes to 2 files
142 added 12 changesets with 24 changes to 2 files
143 (run 'hg update' to get a working copy)
143 (run 'hg update' to get a working copy)
144 $ hg incoming $remote
144 $ hg incoming $remote
145 comparing with http://localhost:$HGPORT/
145 comparing with http://localhost:$HGPORT/
146 searching for changes
146 searching for changes
147 no changes found
147 no changes found
148 [1]
148 [1]
149 $ cd ..
149 $ cd ..
150
150
151 Local is subset:
151 Local is subset:
152
152
153 $ hg clone main subset --rev name2 ; cd subset
153 $ hg clone main subset --rev name2 ; cd subset
154 adding changesets
154 adding changesets
155 adding manifests
155 adding manifests
156 adding file changes
156 adding file changes
157 added 6 changesets with 12 changes to 2 files
157 added 6 changesets with 12 changes to 2 files
158 updating to branch name2
158 updating to branch name2
159 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
160 $ hg incoming $remote
160 $ hg incoming $remote
161 comparing with http://localhost:$HGPORT/
161 comparing with http://localhost:$HGPORT/
162 searching for changes
162 searching for changes
163 6 a7892891da29: r2 name1
163 6 a7892891da29: r2 name1
164 7 2c8d5d5ec612: r3 name1
164 7 2c8d5d5ec612: r3 name1
165 8 e71dbbc70e03: r4 name1
165 8 e71dbbc70e03: r4 name1
166 9 025829e08038: r9 both
166 9 025829e08038: r9 both
167 10 8b6bad1512e1: r10 both
167 10 8b6bad1512e1: r10 both
168 11 a19bfa7e7328: r11 both
168 11 a19bfa7e7328: r11 both
169 $ hg outgoing $remote
169 $ hg outgoing $remote
170 comparing with http://localhost:$HGPORT/
170 comparing with http://localhost:$HGPORT/
171 searching for changes
171 searching for changes
172 no changes found
172 no changes found
173 [1]
173 [1]
174 $ hg push $remote
174 $ hg push $remote
175 pushing to http://localhost:$HGPORT/
175 pushing to http://localhost:$HGPORT/
176 searching for changes
176 searching for changes
177 no changes found
177 no changes found
178 [1]
178 [1]
179 $ hg pull $remote
179 $ hg pull $remote
180 pulling from http://localhost:$HGPORT/
180 pulling from http://localhost:$HGPORT/
181 searching for changes
181 searching for changes
182 adding changesets
182 adding changesets
183 adding manifests
183 adding manifests
184 adding file changes
184 adding file changes
185 added 6 changesets with 12 changes to 2 files
185 added 6 changesets with 12 changes to 2 files
186 (run 'hg update' to get a working copy)
186 (run 'hg update' to get a working copy)
187 $ hg incoming $remote
187 $ hg incoming $remote
188 comparing with http://localhost:$HGPORT/
188 comparing with http://localhost:$HGPORT/
189 searching for changes
189 searching for changes
190 no changes found
190 no changes found
191 [1]
191 [1]
192 $ cd ..
192 $ cd ..
193 $ tstop
193 $ tstop
194
194
195 Remote is empty:
195 Remote is empty:
196
196
197 $ tstart empty2
197 $ tstart empty2
198 $ cd main
198 $ cd main
199 $ hg incoming $remote
199 $ hg incoming $remote
200 comparing with http://localhost:$HGPORT/
200 comparing with http://localhost:$HGPORT/
201 searching for changes
201 searching for changes
202 no changes found
202 no changes found
203 [1]
203 [1]
204 $ hg outgoing $remote
204 $ hg outgoing $remote
205 comparing with http://localhost:$HGPORT/
205 comparing with http://localhost:$HGPORT/
206 searching for changes
206 searching for changes
207 0 d57206cc072a: r0
207 0 d57206cc072a: r0
208 1 0019a3b924fd: r1
208 1 0019a3b924fd: r1
209 2 a7892891da29: r2 name1
209 2 a7892891da29: r2 name1
210 3 2c8d5d5ec612: r3 name1
210 3 2c8d5d5ec612: r3 name1
211 4 e71dbbc70e03: r4 name1
211 4 e71dbbc70e03: r4 name1
212 5 70314b29987d: r5 name2
212 5 70314b29987d: r5 name2
213 6 6c6f5d5f3c11: r6 name2
213 6 6c6f5d5f3c11: r6 name2
214 7 b6b4d315a2ac: r7 name2
214 7 b6b4d315a2ac: r7 name2
215 8 d8f638ac69e9: r8 name2
215 8 d8f638ac69e9: r8 name2
216 9 025829e08038: r9 both
216 9 025829e08038: r9 both
217 10 8b6bad1512e1: r10 both
217 10 8b6bad1512e1: r10 both
218 11 a19bfa7e7328: r11 both
218 11 a19bfa7e7328: r11 both
219 $ hg pull $remote
219 $ hg pull $remote
220 pulling from http://localhost:$HGPORT/
220 pulling from http://localhost:$HGPORT/
221 searching for changes
221 searching for changes
222 no changes found
222 no changes found
223 $ hg push $remote
223 $ hg push $remote
224 pushing to http://localhost:$HGPORT/
224 pushing to http://localhost:$HGPORT/
225 searching for changes
225 searching for changes
226 remote: adding changesets
226 remote: adding changesets
227 remote: adding manifests
227 remote: adding manifests
228 remote: adding file changes
228 remote: adding file changes
229 remote: added 12 changesets with 24 changes to 2 files
229 remote: added 12 changesets with 24 changes to 2 files
230 $ hg outgoing $remote
230 $ hg outgoing $remote
231 comparing with http://localhost:$HGPORT/
231 comparing with http://localhost:$HGPORT/
232 searching for changes
232 searching for changes
233 no changes found
233 no changes found
234 [1]
234 [1]
235 $ cd ..
235 $ cd ..
236 $ tstop
236 $ tstop
237
237
238 Local is superset:
238 Local is superset:
239
239
240 $ hg clone main subset2 --rev name2
240 $ hg clone main subset2 --rev name2
241 adding changesets
241 adding changesets
242 adding manifests
242 adding manifests
243 adding file changes
243 adding file changes
244 added 6 changesets with 12 changes to 2 files
244 added 6 changesets with 12 changes to 2 files
245 updating to branch name2
245 updating to branch name2
246 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
246 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
247 $ tstart subset2
247 $ tstart subset2
248 $ cd main
248 $ cd main
249 $ hg incoming $remote
249 $ hg incoming $remote
250 comparing with http://localhost:$HGPORT/
250 comparing with http://localhost:$HGPORT/
251 searching for changes
251 searching for changes
252 no changes found
252 no changes found
253 [1]
253 [1]
254 $ hg outgoing $remote
254 $ hg outgoing $remote
255 comparing with http://localhost:$HGPORT/
255 comparing with http://localhost:$HGPORT/
256 searching for changes
256 searching for changes
257 2 a7892891da29: r2 name1
257 2 a7892891da29: r2 name1
258 3 2c8d5d5ec612: r3 name1
258 3 2c8d5d5ec612: r3 name1
259 4 e71dbbc70e03: r4 name1
259 4 e71dbbc70e03: r4 name1
260 9 025829e08038: r9 both
260 9 025829e08038: r9 both
261 10 8b6bad1512e1: r10 both
261 10 8b6bad1512e1: r10 both
262 11 a19bfa7e7328: r11 both
262 11 a19bfa7e7328: r11 both
263 $ hg pull $remote
263 $ hg pull $remote
264 pulling from http://localhost:$HGPORT/
264 pulling from http://localhost:$HGPORT/
265 searching for changes
265 searching for changes
266 no changes found
266 no changes found
267 $ hg push $remote
267 $ hg push $remote
268 pushing to http://localhost:$HGPORT/
268 pushing to http://localhost:$HGPORT/
269 searching for changes
269 searching for changes
270 abort: push creates new remote branches: both, name1!
270 abort: push creates new remote branches: both, name1!
271 (use 'hg push --new-branch' to create new remote branches)
271 (use 'hg push --new-branch' to create new remote branches)
272 [255]
272 [255]
273 $ hg push $remote --new-branch
273 $ hg push $remote --new-branch
274 pushing to http://localhost:$HGPORT/
274 pushing to http://localhost:$HGPORT/
275 searching for changes
275 searching for changes
276 remote: adding changesets
276 remote: adding changesets
277 remote: adding manifests
277 remote: adding manifests
278 remote: adding file changes
278 remote: adding file changes
279 remote: added 6 changesets with 12 changes to 2 files
279 remote: added 6 changesets with 12 changes to 2 files
280 $ hg outgoing $remote
280 $ hg outgoing $remote
281 comparing with http://localhost:$HGPORT/
281 comparing with http://localhost:$HGPORT/
282 searching for changes
282 searching for changes
283 no changes found
283 no changes found
284 [1]
284 [1]
285 $ cd ..
285 $ cd ..
286 $ tstop
286 $ tstop
287
287
288 Partial pull:
288 Partial pull:
289
289
290 $ tstart main
290 $ tstart main
291 $ hg clone $remote partial --rev name2
291 $ hg clone $remote partial --rev name2
292 adding changesets
292 adding changesets
293 adding manifests
293 adding manifests
294 adding file changes
294 adding file changes
295 added 6 changesets with 12 changes to 2 files
295 added 6 changesets with 12 changes to 2 files
296 updating to branch name2
296 updating to branch name2
297 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
297 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
298 $ cd partial
298 $ cd partial
299 $ hg incoming $remote
299 $ hg incoming $remote
300 comparing with http://localhost:$HGPORT/
300 comparing with http://localhost:$HGPORT/
301 searching for changes
301 searching for changes
302 6 a7892891da29: r2 name1
302 6 a7892891da29: r2 name1
303 7 2c8d5d5ec612: r3 name1
303 7 2c8d5d5ec612: r3 name1
304 8 e71dbbc70e03: r4 name1
304 8 e71dbbc70e03: r4 name1
305 9 025829e08038: r9 both
305 9 025829e08038: r9 both
306 10 8b6bad1512e1: r10 both
306 10 8b6bad1512e1: r10 both
307 11 a19bfa7e7328: r11 both
307 11 a19bfa7e7328: r11 both
308 $ hg incoming $remote --rev name1
308 $ hg incoming $remote --rev name1
309 comparing with http://localhost:$HGPORT/
309 comparing with http://localhost:$HGPORT/
310 searching for changes
310 searching for changes
311 6 a7892891da29: r2 name1
311 6 a7892891da29: r2 name1
312 7 2c8d5d5ec612: r3 name1
312 7 2c8d5d5ec612: r3 name1
313 8 e71dbbc70e03: r4 name1
313 8 e71dbbc70e03: r4 name1
314 $ hg pull $remote --rev name1
314 $ hg pull $remote --rev name1
315 pulling from http://localhost:$HGPORT/
315 pulling from http://localhost:$HGPORT/
316 searching for changes
316 searching for changes
317 adding changesets
317 adding changesets
318 adding manifests
318 adding manifests
319 adding file changes
319 adding file changes
320 added 3 changesets with 6 changes to 2 files (+1 heads)
320 added 3 changesets with 6 changes to 2 files (+1 heads)
321 (run 'hg heads' to see heads)
321 (run 'hg heads' to see heads)
322 $ hg incoming $remote
322 $ hg incoming $remote
323 comparing with http://localhost:$HGPORT/
323 comparing with http://localhost:$HGPORT/
324 searching for changes
324 searching for changes
325 9 025829e08038: r9 both
325 9 025829e08038: r9 both
326 10 8b6bad1512e1: r10 both
326 10 8b6bad1512e1: r10 both
327 11 a19bfa7e7328: r11 both
327 11 a19bfa7e7328: r11 both
328 $ cd ..
328 $ cd ..
329 $ tstop
329 $ tstop
330
330
331 Both have new stuff in new named branches:
331 Both have new stuff in new named branches:
332
332
333 $ hg clone main repo1a --rev name1 -q
333 $ hg clone main repo1a --rev name1 -q
334 $ hg clone repo1a repo1b -q
334 $ hg clone repo1a repo1b -q
335 $ hg clone main repo2a --rev name2 -q
335 $ hg clone main repo2a --rev name2 -q
336 $ hg clone repo2a repo2b -q
336 $ hg clone repo2a repo2b -q
337 $ tstart repo1a
337 $ tstart repo1a
338
338
339 $ cd repo2a
339 $ cd repo2a
340 $ hg incoming $remote
340 $ hg incoming $remote
341 comparing with http://localhost:$HGPORT/
341 comparing with http://localhost:$HGPORT/
342 searching for changes
342 searching for changes
343 6 a7892891da29: r2 name1
343 6 a7892891da29: r2 name1
344 7 2c8d5d5ec612: r3 name1
344 7 2c8d5d5ec612: r3 name1
345 8 e71dbbc70e03: r4 name1
345 8 e71dbbc70e03: r4 name1
346 $ hg outgoing $remote
346 $ hg outgoing $remote
347 comparing with http://localhost:$HGPORT/
347 comparing with http://localhost:$HGPORT/
348 searching for changes
348 searching for changes
349 2 70314b29987d: r5 name2
349 2 70314b29987d: r5 name2
350 3 6c6f5d5f3c11: r6 name2
350 3 6c6f5d5f3c11: r6 name2
351 4 b6b4d315a2ac: r7 name2
351 4 b6b4d315a2ac: r7 name2
352 5 d8f638ac69e9: r8 name2
352 5 d8f638ac69e9: r8 name2
353 $ hg push $remote --new-branch
353 $ hg push $remote --new-branch
354 pushing to http://localhost:$HGPORT/
354 pushing to http://localhost:$HGPORT/
355 searching for changes
355 searching for changes
356 remote: adding changesets
356 remote: adding changesets
357 remote: adding manifests
357 remote: adding manifests
358 remote: adding file changes
358 remote: adding file changes
359 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
359 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
360 $ hg pull $remote
360 $ hg pull $remote
361 pulling from http://localhost:$HGPORT/
361 pulling from http://localhost:$HGPORT/
362 searching for changes
362 searching for changes
363 adding changesets
363 adding changesets
364 adding manifests
364 adding manifests
365 adding file changes
365 adding file changes
366 added 3 changesets with 6 changes to 2 files (+1 heads)
366 added 3 changesets with 6 changes to 2 files (+1 heads)
367 (run 'hg heads' to see heads)
367 (run 'hg heads' to see heads)
368 $ hg incoming $remote
368 $ hg incoming $remote
369 comparing with http://localhost:$HGPORT/
369 comparing with http://localhost:$HGPORT/
370 searching for changes
370 searching for changes
371 no changes found
371 no changes found
372 [1]
372 [1]
373 $ hg outgoing $remote
373 $ hg outgoing $remote
374 comparing with http://localhost:$HGPORT/
374 comparing with http://localhost:$HGPORT/
375 searching for changes
375 searching for changes
376 no changes found
376 no changes found
377 [1]
377 [1]
378 $ cd ..
378 $ cd ..
379 $ tstop
379 $ tstop
380
380
381 $ tstart repo1b
381 $ tstart repo1b
382 $ cd repo2b
382 $ cd repo2b
383 $ hg incoming $remote
383 $ hg incoming $remote
384 comparing with http://localhost:$HGPORT/
384 comparing with http://localhost:$HGPORT/
385 searching for changes
385 searching for changes
386 6 a7892891da29: r2 name1
386 6 a7892891da29: r2 name1
387 7 2c8d5d5ec612: r3 name1
387 7 2c8d5d5ec612: r3 name1
388 8 e71dbbc70e03: r4 name1
388 8 e71dbbc70e03: r4 name1
389 $ hg outgoing $remote
389 $ hg outgoing $remote
390 comparing with http://localhost:$HGPORT/
390 comparing with http://localhost:$HGPORT/
391 searching for changes
391 searching for changes
392 2 70314b29987d: r5 name2
392 2 70314b29987d: r5 name2
393 3 6c6f5d5f3c11: r6 name2
393 3 6c6f5d5f3c11: r6 name2
394 4 b6b4d315a2ac: r7 name2
394 4 b6b4d315a2ac: r7 name2
395 5 d8f638ac69e9: r8 name2
395 5 d8f638ac69e9: r8 name2
396 $ hg pull $remote
396 $ hg pull $remote
397 pulling from http://localhost:$HGPORT/
397 pulling from http://localhost:$HGPORT/
398 searching for changes
398 searching for changes
399 adding changesets
399 adding changesets
400 adding manifests
400 adding manifests
401 adding file changes
401 adding file changes
402 added 3 changesets with 6 changes to 2 files (+1 heads)
402 added 3 changesets with 6 changes to 2 files (+1 heads)
403 (run 'hg heads' to see heads)
403 (run 'hg heads' to see heads)
404 $ hg push $remote --new-branch
404 $ hg push $remote --new-branch
405 pushing to http://localhost:$HGPORT/
405 pushing to http://localhost:$HGPORT/
406 searching for changes
406 searching for changes
407 remote: adding changesets
407 remote: adding changesets
408 remote: adding manifests
408 remote: adding manifests
409 remote: adding file changes
409 remote: adding file changes
410 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
410 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
411 $ hg incoming $remote
411 $ hg incoming $remote
412 comparing with http://localhost:$HGPORT/
412 comparing with http://localhost:$HGPORT/
413 searching for changes
413 searching for changes
414 no changes found
414 no changes found
415 [1]
415 [1]
416 $ hg outgoing $remote
416 $ hg outgoing $remote
417 comparing with http://localhost:$HGPORT/
417 comparing with http://localhost:$HGPORT/
418 searching for changes
418 searching for changes
419 no changes found
419 no changes found
420 [1]
420 [1]
421 $ cd ..
421 $ cd ..
422 $ tstop
422 $ tstop
423
423
424 Both have new stuff in existing named branches:
424 Both have new stuff in existing named branches:
425
425
426 $ rm -r repo1a repo1b repo2a repo2b
426 $ rm -r repo1a repo1b repo2a repo2b
427 $ hg clone main repo1a --rev 3 --rev 8 -q
427 $ hg clone main repo1a --rev 3 --rev 8 -q
428 $ hg clone repo1a repo1b -q
428 $ hg clone repo1a repo1b -q
429 $ hg clone main repo2a --rev 4 --rev 7 -q
429 $ hg clone main repo2a --rev 4 --rev 7 -q
430 $ hg clone repo2a repo2b -q
430 $ hg clone repo2a repo2b -q
431 $ tstart repo1a
431 $ tstart repo1a
432
432
433 $ cd repo2a
433 $ cd repo2a
434 $ hg incoming $remote
434 $ hg incoming $remote
435 comparing with http://localhost:$HGPORT/
435 comparing with http://localhost:$HGPORT/
436 searching for changes
436 searching for changes
437 8 d8f638ac69e9: r8 name2
437 8 d8f638ac69e9: r8 name2
438 $ hg outgoing $remote
438 $ hg outgoing $remote
439 comparing with http://localhost:$HGPORT/
439 comparing with http://localhost:$HGPORT/
440 searching for changes
440 searching for changes
441 4 e71dbbc70e03: r4 name1
441 4 e71dbbc70e03: r4 name1
442 $ hg push $remote --new-branch
442 $ hg push $remote --new-branch
443 pushing to http://localhost:$HGPORT/
443 pushing to http://localhost:$HGPORT/
444 searching for changes
444 searching for changes
445 remote: adding changesets
445 remote: adding changesets
446 remote: adding manifests
446 remote: adding manifests
447 remote: adding file changes
447 remote: adding file changes
448 remote: added 1 changesets with 2 changes to 2 files
448 remote: added 1 changesets with 2 changes to 2 files
449 $ hg pull $remote
449 $ hg pull $remote
450 pulling from http://localhost:$HGPORT/
450 pulling from http://localhost:$HGPORT/
451 searching for changes
451 searching for changes
452 adding changesets
452 adding changesets
453 adding manifests
453 adding manifests
454 adding file changes
454 adding file changes
455 added 1 changesets with 2 changes to 2 files
455 added 1 changesets with 2 changes to 2 files
456 (run 'hg update' to get a working copy)
456 (run 'hg update' to get a working copy)
457 $ hg incoming $remote
457 $ hg incoming $remote
458 comparing with http://localhost:$HGPORT/
458 comparing with http://localhost:$HGPORT/
459 searching for changes
459 searching for changes
460 no changes found
460 no changes found
461 [1]
461 [1]
462 $ hg outgoing $remote
462 $ hg outgoing $remote
463 comparing with http://localhost:$HGPORT/
463 comparing with http://localhost:$HGPORT/
464 searching for changes
464 searching for changes
465 no changes found
465 no changes found
466 [1]
466 [1]
467 $ cd ..
467 $ cd ..
468 $ tstop
468 $ tstop
469
469
470 $ tstart repo1b
470 $ tstart repo1b
471 $ cd repo2b
471 $ cd repo2b
472 $ hg incoming $remote
472 $ hg incoming $remote
473 comparing with http://localhost:$HGPORT/
473 comparing with http://localhost:$HGPORT/
474 searching for changes
474 searching for changes
475 8 d8f638ac69e9: r8 name2
475 8 d8f638ac69e9: r8 name2
476 $ hg outgoing $remote
476 $ hg outgoing $remote
477 comparing with http://localhost:$HGPORT/
477 comparing with http://localhost:$HGPORT/
478 searching for changes
478 searching for changes
479 4 e71dbbc70e03: r4 name1
479 4 e71dbbc70e03: r4 name1
480 $ hg pull $remote
480 $ hg pull $remote
481 pulling from http://localhost:$HGPORT/
481 pulling from http://localhost:$HGPORT/
482 searching for changes
482 searching for changes
483 adding changesets
483 adding changesets
484 adding manifests
484 adding manifests
485 adding file changes
485 adding file changes
486 added 1 changesets with 2 changes to 2 files
486 added 1 changesets with 2 changes to 2 files
487 (run 'hg update' to get a working copy)
487 (run 'hg update' to get a working copy)
488 $ hg push $remote --new-branch
488 $ hg push $remote --new-branch
489 pushing to http://localhost:$HGPORT/
489 pushing to http://localhost:$HGPORT/
490 searching for changes
490 searching for changes
491 remote: adding changesets
491 remote: adding changesets
492 remote: adding manifests
492 remote: adding manifests
493 remote: adding file changes
493 remote: adding file changes
494 remote: added 1 changesets with 2 changes to 2 files
494 remote: added 1 changesets with 2 changes to 2 files
495 $ hg incoming $remote
495 $ hg incoming $remote
496 comparing with http://localhost:$HGPORT/
496 comparing with http://localhost:$HGPORT/
497 searching for changes
497 searching for changes
498 no changes found
498 no changes found
499 [1]
499 [1]
500 $ hg outgoing $remote
500 $ hg outgoing $remote
501 comparing with http://localhost:$HGPORT/
501 comparing with http://localhost:$HGPORT/
502 searching for changes
502 searching for changes
503 no changes found
503 no changes found
504 [1]
504 [1]
505 $ cd ..
505 $ cd ..
506 #if zstd
506 #if zstd
507 $ tstop show
507 $ tstop show
508 "GET /?cmd=capabilities HTTP/1.1" 200 -
508 "GET /?cmd=capabilities HTTP/1.1" 200 -
509 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
509 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
510 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
510 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
511 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
511 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
512 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
512 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
513 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
513 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
514 "GET /?cmd=capabilities HTTP/1.1" 200 -
514 "GET /?cmd=capabilities HTTP/1.1" 200 -
515 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
515 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
516 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
516 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
517 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
517 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
518 "GET /?cmd=capabilities HTTP/1.1" 200 -
518 "GET /?cmd=capabilities HTTP/1.1" 200 -
519 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
519 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
520 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
520 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
521 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
521 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
522 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
522 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
523 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
523 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
524 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
524 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
525 "GET /?cmd=capabilities HTTP/1.1" 200 -
525 "GET /?cmd=capabilities HTTP/1.1" 200 -
526 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
526 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
527 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
527 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
528 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
528 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
529 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
529 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
530 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
530 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
531 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
531 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
532 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91* (glob)
532 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91* (glob)
533 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
533 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
534 "GET /?cmd=capabilities HTTP/1.1" 200 -
534 "GET /?cmd=capabilities HTTP/1.1" 200 -
535 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
535 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
536 "GET /?cmd=capabilities HTTP/1.1" 200 -
536 "GET /?cmd=capabilities HTTP/1.1" 200 -
537 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
537 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zstd,zlib,none,bzip2
538 #else
538 #else
539 $ tstop show
539 $ tstop show
540 "GET /?cmd=capabilities HTTP/1.1" 200 -
540 "GET /?cmd=capabilities HTTP/1.1" 200 -
541 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
541 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
542 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
542 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
543 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
543 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
544 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
544 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
545 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
545 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
546 "GET /?cmd=capabilities HTTP/1.1" 200 -
546 "GET /?cmd=capabilities HTTP/1.1" 200 -
547 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
547 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
548 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
548 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
549 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
549 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
550 "GET /?cmd=capabilities HTTP/1.1" 200 -
550 "GET /?cmd=capabilities HTTP/1.1" 200 -
551 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
551 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
552 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
552 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
553 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
553 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
554 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
554 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
555 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
555 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961 x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
556 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
556 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
557 "GET /?cmd=capabilities HTTP/1.1" 200 -
557 "GET /?cmd=capabilities HTTP/1.1" 200 -
558 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
558 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
559 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
559 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
560 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
560 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
561 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
561 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
562 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
562 "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
563 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
563 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
564 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91* (glob)
564 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91* (glob)
565 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
565 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
566 "GET /?cmd=capabilities HTTP/1.1" 200 -
566 "GET /?cmd=capabilities HTTP/1.1" 200 -
567 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
567 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
568 "GET /?cmd=capabilities HTTP/1.1" 200 -
568 "GET /?cmd=capabilities HTTP/1.1" 200 -
569 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
569 "GET /?cmd=heads HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=zlib,none,bzip2
570 #endif
570 #endif
General Comments 0
You need to be logged in to leave comments. Login now